diff --git a/.editorconfig b/.editorconfig
new file mode 100644
index 0000000..d7d9cae
--- /dev/null
+++ b/.editorconfig
@@ -0,0 +1,13 @@
+# EditorConfig is awesome: https://EditorConfig.org
+
+root = true
+
+[*]
+end_of_line = lf
+insert_final_newline = true
+trim_trailing_whitespace = true
+
+[*.py]
+charset = utf-8
+indent_style = space
+indent_size = 4
diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs
new file mode 100644
index 0000000..befa060
--- /dev/null
+++ b/.git-blame-ignore-revs
@@ -0,0 +1,5 @@
+# Run code through yapf
+19a821d5f1ff9079f9a40d27553182a433a27834
+
+# Run code through black
+0d9e3581d57f376865f49ae62fe9171789beca56
diff --git a/.gitignore b/.gitignore
index 029341d..767654b 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,90 +1,46 @@
+#
+# OS-specific
+#
+
 .DS_Store
-# Byte-compiled / optimized / DLL files
-__pycache__/
+
+#
+# Language specific
+#
+
+# Python
 *.py[cod]
-*$py.class
-
-# C extensions
-*.so
-
-# Distribution / packaging
-.Python
-env/
-build/
-develop-eggs/
-dist/
-downloads/
-eggs/
-.eggs/
-lib/
-lib64/
-parts/
-sdist/
-var/
 *.egg-info/
-.installed.cfg
-*.egg
-
-# PyInstaller
-#  Usually these files are written by a python script from a template
-#  before PyInstaller builds the exe, so as to inject date/other infos into it.
-*.manifest
-*.spec
-
-# Installer logs
-pip-log.txt
-pip-delete-this-directory.txt
-
-# Unit test / coverage reports
-htmlcov/
-.tox/
-.coverage
-.coverage.*
-.cache
-nosetests.xml
-coverage.xml
-*,cover
-.hypothesis/
-
-# Translations
-*.mo
-*.pot
-
-# Django stuff:
-*.log
-local_settings.py
-
-# Sphinx documentation
-docs/_build/
-
-# PyBuilder
-target/
-
-#Ipython Notebook
-.ipynb_checkpoints
-
-# pyenv
-.python-version
-
-# PyCharm
-.idea/
-
-# IntelliJ
-*.iml
-
-# VSCode
-/.vscode
-
-# Python virtual environment
+/build/
 /.venv
 
-# antlion configuration files
+#
+# Editors
+#
+
+/.idea/
+/.vscode/
+
+#
+# antlion
+#
+
+# Configuration
 /*.json
 /*.yaml
 /config/
 
-# antlion runtime files
+# Generated during run-time
 /logs
 
 # Local development scripts
 /*.sh
+
+#
+# third_party
+#
+
+/third_party/*
+!/third_party/github.com/
+!/third_party/github.com/jd/tenacity
+/third_party/github.com/jd/tenacity/src
diff --git a/BUILD.gn b/BUILD.gn
new file mode 100644
index 0000000..f2aab56
--- /dev/null
+++ b/BUILD.gn
@@ -0,0 +1,228 @@
+# Copyright 2023 The Fuchsia Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Declare Fuchsia build targets for using antlion from the Fuchsia tree.
+# Requires additional configuration of jiri fetch attributes from your Fuchsia
+# checkout:
+#   `jiri init -fetch-optional=antlion`
+
+import("//build/python/python_library.gni")
+
+# Tests for full build validation
+group("e2e_tests") {
+  testonly = true
+  public_deps = [ "src/antlion/tests:e2e_tests" ]
+}
+
+# Subset of tests to validate builds in under 15 minutes.
+group("e2e_tests_quick") {
+  testonly = true
+  public_deps = [ "src/antlion/tests:e2e_tests_quick" ]
+}
+
+# Tests for at-desk custom validation
+group("e2e_tests_manual") {
+  testonly = true
+  public_deps = [ "src/antlion/tests:e2e_tests_manual" ]
+}
+
+# deprecated: prefer e2e_tests_quick
+group("smoke_tests") {
+  testonly = true
+  public_deps = [ ":e2e_tests_quick" ]
+}
+
+# Unit tests only
+group("tests") {
+  testonly = true
+  public_deps = [ "runner:tests" ]
+}
+
+python_library("antlion") {
+  source_root = "//third_party/antlion/src/antlion"
+  sources = [
+    "__init__.py",
+    "base_test.py",
+    "bin/__init__.py",
+    "bin/act.py",
+    "capabilities/__init__.py",
+    "capabilities/ssh.py",
+    "config_parser.py",
+    "context.py",
+    "controllers/__init__.py",
+    "controllers/access_point.py",
+    "controllers/adb.py",
+    "controllers/adb_lib/__init__.py",
+    "controllers/adb_lib/error.py",
+    "controllers/android_device.py",
+    "controllers/android_lib/__init__.py",
+    "controllers/android_lib/errors.py",
+    "controllers/android_lib/events.py",
+    "controllers/android_lib/logcat.py",
+    "controllers/android_lib/services.py",
+    "controllers/android_lib/tel/__init__.py",
+    "controllers/android_lib/tel/tel_utils.py",
+    "controllers/ap_lib/__init__.py",
+    "controllers/ap_lib/ap_get_interface.py",
+    "controllers/ap_lib/ap_iwconfig.py",
+    "controllers/ap_lib/bridge_interface.py",
+    "controllers/ap_lib/dhcp_config.py",
+    "controllers/ap_lib/dhcp_server.py",
+    "controllers/ap_lib/extended_capabilities.py",
+    "controllers/ap_lib/hostapd.py",
+    "controllers/ap_lib/hostapd_ap_preset.py",
+    "controllers/ap_lib/hostapd_bss_settings.py",
+    "controllers/ap_lib/hostapd_config.py",
+    "controllers/ap_lib/hostapd_constants.py",
+    "controllers/ap_lib/hostapd_security.py",
+    "controllers/ap_lib/hostapd_utils.py",
+    "controllers/ap_lib/radio_measurement.py",
+    "controllers/ap_lib/radvd.py",
+    "controllers/ap_lib/radvd_config.py",
+    "controllers/ap_lib/radvd_constants.py",
+    "controllers/ap_lib/third_party_ap_profiles/__init__.py",
+    "controllers/ap_lib/third_party_ap_profiles/actiontec.py",
+    "controllers/ap_lib/third_party_ap_profiles/asus.py",
+    "controllers/ap_lib/third_party_ap_profiles/belkin.py",
+    "controllers/ap_lib/third_party_ap_profiles/linksys.py",
+    "controllers/ap_lib/third_party_ap_profiles/netgear.py",
+    "controllers/ap_lib/third_party_ap_profiles/securifi.py",
+    "controllers/ap_lib/third_party_ap_profiles/tplink.py",
+    "controllers/ap_lib/wireless_network_management.py",
+    "controllers/attenuator.py",
+    "controllers/attenuator_lib/__init__.py",
+    "controllers/attenuator_lib/_tnhelper.py",
+    "controllers/attenuator_lib/aeroflex/__init__.py",
+    "controllers/attenuator_lib/aeroflex/telnet.py",
+    "controllers/attenuator_lib/minicircuits/__init__.py",
+    "controllers/attenuator_lib/minicircuits/http.py",
+    "controllers/attenuator_lib/minicircuits/telnet.py",
+    "controllers/fastboot.py",
+    "controllers/fuchsia_device.py",
+    "controllers/fuchsia_lib/__init__.py",
+    "controllers/fuchsia_lib/base_lib.py",
+    "controllers/fuchsia_lib/device_lib.py",
+    "controllers/fuchsia_lib/ffx.py",
+    "controllers/fuchsia_lib/hardware_power_statecontrol_lib.py",
+    "controllers/fuchsia_lib/lib_controllers/__init__.py",
+    "controllers/fuchsia_lib/lib_controllers/netstack_controller.py",
+    "controllers/fuchsia_lib/lib_controllers/wlan_controller.py",
+    "controllers/fuchsia_lib/lib_controllers/wlan_policy_controller.py",
+    "controllers/fuchsia_lib/location/__init__.py",
+    "controllers/fuchsia_lib/location/regulatory_region_lib.py",
+    "controllers/fuchsia_lib/logging_lib.py",
+    "controllers/fuchsia_lib/netstack/__init__.py",
+    "controllers/fuchsia_lib/netstack/netstack_lib.py",
+    "controllers/fuchsia_lib/package_server.py",
+    "controllers/fuchsia_lib/sl4f.py",
+    "controllers/fuchsia_lib/ssh.py",
+    "controllers/fuchsia_lib/utils_lib.py",
+    "controllers/fuchsia_lib/wlan_ap_policy_lib.py",
+    "controllers/fuchsia_lib/wlan_deprecated_configuration_lib.py",
+    "controllers/fuchsia_lib/wlan_lib.py",
+    "controllers/fuchsia_lib/wlan_policy_lib.py",
+    "controllers/iperf_client.py",
+    "controllers/iperf_server.py",
+    "controllers/openwrt_ap.py",
+    "controllers/openwrt_lib/__init__.py",
+    "controllers/openwrt_lib/network_const.py",
+    "controllers/openwrt_lib/network_settings.py",
+    "controllers/openwrt_lib/openwrt_constants.py",
+    "controllers/openwrt_lib/wireless_config.py",
+    "controllers/openwrt_lib/wireless_settings_applier.py",
+    "controllers/pdu.py",
+    "controllers/pdu_lib/__init__.py",
+    "controllers/pdu_lib/digital_loggers/__init__.py",
+    "controllers/pdu_lib/digital_loggers/webpowerswitch.py",
+    "controllers/pdu_lib/synaccess/__init__.py",
+    "controllers/pdu_lib/synaccess/np02b.py",
+    "controllers/sl4a_lib/__init__.py",
+    "controllers/sl4a_lib/error_reporter.py",
+    "controllers/sl4a_lib/event_dispatcher.py",
+    "controllers/sl4a_lib/rpc_client.py",
+    "controllers/sl4a_lib/rpc_connection.py",
+    "controllers/sl4a_lib/sl4a_manager.py",
+    "controllers/sl4a_lib/sl4a_ports.py",
+    "controllers/sl4a_lib/sl4a_session.py",
+    "controllers/sl4a_lib/sl4a_types.py",
+    "controllers/sniffer.py",
+    "controllers/sniffer_lib/__init__.py",
+    "controllers/sniffer_lib/local/__init__.py",
+    "controllers/sniffer_lib/local/local_base.py",
+    "controllers/sniffer_lib/local/tcpdump.py",
+    "controllers/sniffer_lib/local/tshark.py",
+    "controllers/utils_lib/__init__.py",
+    "controllers/utils_lib/commands/__init__.py",
+    "controllers/utils_lib/commands/ip.py",
+    "controllers/utils_lib/commands/route.py",
+    "controllers/utils_lib/commands/shell.py",
+    "controllers/utils_lib/host_utils.py",
+    "controllers/utils_lib/ssh/__init__.py",
+    "controllers/utils_lib/ssh/connection.py",
+    "controllers/utils_lib/ssh/formatter.py",
+    "controllers/utils_lib/ssh/settings.py",
+    "dict_object.py",
+    "error.py",
+    "event/__init__.py",
+    "event/decorators.py",
+    "event/event.py",
+    "event/event_bus.py",
+    "event/event_subscription.py",
+    "event/subscription_bundle.py",
+    "event/subscription_handle.py",
+    "keys.py",
+    "libs/__init__.py",
+    "libs/logging/__init__.py",
+    "libs/logging/log_stream.py",
+    "libs/ota/__init__.py",
+    "libs/ota/ota_runners/__init__.py",
+    "libs/ota/ota_runners/ota_runner.py",
+    "libs/ota/ota_runners/ota_runner_factory.py",
+    "libs/ota/ota_tools/__init__.py",
+    "libs/ota/ota_tools/adb_sideload_ota_tool.py",
+    "libs/ota/ota_tools/ota_tool.py",
+    "libs/ota/ota_tools/ota_tool_factory.py",
+    "libs/ota/ota_tools/update_device_ota_tool.py",
+    "libs/ota/ota_updater.py",
+    "libs/proc/__init__.py",
+    "libs/proc/job.py",
+    "libs/proc/process.py",
+    "libs/yaml_writer.py",
+    "logger.py",
+    "net.py",
+    "records.py",
+    "signals.py",
+    "test_decorators.py",
+    "test_runner.py",
+    "test_utils/__init__.py",
+    "test_utils/abstract_devices/__init__.py",
+    "test_utils/abstract_devices/wlan_device.py",
+    "test_utils/abstract_devices/wmm_transceiver.py",
+    "test_utils/dhcp/__init__.py",
+    "test_utils/dhcp/base_test.py",
+    "test_utils/fuchsia/__init__.py",
+    "test_utils/fuchsia/utils.py",
+    "test_utils/fuchsia/wmm_test_cases.py",
+    "test_utils/net/__init__.py",
+    "test_utils/net/connectivity_const.py",
+    "test_utils/net/net_test_utils.py",
+    "test_utils/wifi/__init__.py",
+    "test_utils/wifi/base_test.py",
+    "test_utils/wifi/wifi_constants.py",
+    "test_utils/wifi/wifi_performance_test_utils/__init__.py",
+    "test_utils/wifi/wifi_performance_test_utils/bokeh_figure.py",
+    "test_utils/wifi/wifi_performance_test_utils/brcm_utils.py",
+    "test_utils/wifi/wifi_performance_test_utils/ping_utils.py",
+    "test_utils/wifi/wifi_performance_test_utils/qcom_utils.py",
+    "test_utils/wifi/wifi_power_test_utils.py",
+    "test_utils/wifi/wifi_test_utils.py",
+    "tracelogger.py",
+    "utils.py",
+  ]
+  library_deps = [
+    "//third_party/mobly",
+    "//third_party/pyyaml:yaml",
+    "third_party/github.com/jd/tenacity",
+  ]
+}
diff --git a/CHANGELOG.md b/CHANGELOG.md
index a9c7f67..248b51f 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -10,20 +10,65 @@
 
 ## [Unreleased]
 
+## 0.3.0 - 2023-05-17
+
+### Deprecated
+
+- **Support for ACTS JSON configs; instead, use Mobly YAML configs.** To
+ease this transition, upon running `act.py`, a compatible YAML config will be
+generated for you and placed next to your JSON config.
+- **The `act.py` binary; instead, invoke tests directly.** Upon running
+`act.py`, a deprecation warning will provide instructions for how to invoke
+antlion tests without act.py and with the newly generated YAML config.
+
 ### Added
 
+- Presubmit testing in [CV][CV] (aka CQ). All tests specified with the
+`qemu_env` environment will run before every antlion CL is submitted.
+- Postsubmit testing in [CI][CI]. See [Milo][builders] for an exhaustive list of
+builders.
+- [EditorConfig](https://editorconfig.org) file for consistent coding styles.
+Installing an EditorConfig plugin for your editor is highly recommended.
+
+[CV]: https://chromium.googlesource.com/infra/luci/luci-go/+/refs/heads/main/cv/README.md
+[CI]: https://chromium.googlesource.com/chromium/src/+/master/docs/tour_of_luci_ui.md
+[builders]: https://luci-milo.appspot.com/ui/search?q=antlion
+
 ### Changed
 
+- Default test execution from ACTS to Mobly. `antlion_host_test()` now invokes
+the test file directly using the Mobly test runner, rather than using `act.py`.
+  - All tests have been refactored to allow direct running with the Mobly test
+  runner.
+  - `act.py` now converts ACTS JSON config to compatible Mobly YAML config. The
+  resulting config is passed directly to Mobly's config parser. See notes for
+  this release's deprecations above.
+- Generate YAML config instead of JSON config from antlion-runner.
+- `FuchsiaDevice.authorized_file_loc` config field is now optional. This field
+is only used during `FlashTest`; it is not used when the device is already
+provisioned (e.g. when tests are dispatched in Fuchsia infrastructure).
+
 ### Removed
 
+- Unused controllers and tests (full list)
+
 ### Fixed
-
+- Failure to stop session_manager using ffx in `WlanRebootTest` ([@patricklu],
+[bug](http://b/267330535))
+- Failure to parse 'test_name' in DHCP configuration file in `Dhcpv4InteropTest`
+(invalid option) introduced by previous refactor ([@patricklu],
+[bug](http://b/232574848))
+- Logging for `Dhcpv4InteropTest` changed to utilize a temp file instead of
+/var/log/messages to fix test error with duplicate PID log messages
+([@patricklu], [bug](http://b/232574848))
-[unreleased]: https://fuchsia.googlesource.com/antlion/+/refs/tags/v0.2.0..refs/heads/main
 
+[unreleased]: https://fuchsia.googlesource.com/antlion/+/refs/tags/v0.2.0..refs/heads/main
-## [0.2.0] - 2022-01-03
+## [0.2.0] - 2023-01-03
 
 ### Added
 
+- Added snapshots before reboot and during test teardown in `WlanRebootTest`
+([@patricklu], [bug](http://b/273923552))
 - Download radvd logs from AP for debugging IPv6 address allocation
 - Optional `wlan_features` config field to `FuchsiaDevice` for declaring which
 WLAN features the device supports, such as BSS Transition Management
diff --git a/README.md b/README.md
index be529cf..7d5950b 100644
--- a/README.md
+++ b/README.md
@@ -11,7 +11,85 @@
 [Report Bug]: http://go/conn-test-bug
 [Request Feature]: http://b/issues/new?component=1182297&template=1680893
 
-## Getting Started
+## Getting started with QEMU
+
+The quickest way to run antlion is by using the Fuchsia QEMU emulator. This
+enables antlion tests that do not require hardware-specific capabilities like
+WLAN. This is especially useful to verify if antlion builds and runs without
+syntax errors. If you require WLAN capabilities, see
+[below](#running-with-a-physical-device).
+
+1. [Checkout Fuchsia](https://fuchsia.dev/fuchsia-src/get-started/get_fuchsia_source)
+
+2. Configure and build Fuchsia to run antlion tests virtually on QEMU
+
+   ```sh
+   fx set core.qemu-x64 \
+      --with //src/testing/sl4f \
+      --with //src/sys/bin/start_sl4f \
+      --args 'core_realm_shards += [ "//src/testing/sl4f:sl4f_core_shard" ]' \
+      --with-host //third_party/antlion:e2e_tests_quick
+   fx build
+   ```
+
+3. In a separate terminal, run the emulator with networking enabled
+
+   ```sh
+   ffx emu stop && ffx emu start -H --net tap && ffx log
+   ```
+
+4. In a separate terminal, run a package server
+
+   ```sh
+   fx serve
+   ```
+
+5. Run an antlion test
+
+   ```sh
+   fx test --e2e --output //third_party/antlion/src/antlion/tests/examples:sl4f_sanity_test
+   ```
+
+## Running with a local physical device
+
+A physical device is required for most antlion tests, which rely on physical I/O
+such as WLAN and Bluetooth. Antlion is designed to make testing physical devices
+as easy, reliable, and reproducible as possible. The device will be discovered
+using mDNS, so make sure your host machine has a network connection to the
+device.
+
+1. Configure and build Fuchsia for your target with the following extra
+   arguments:
+
+   ```sh
+   fx set core.my-super-cool-product \
+      --with //src/testing/sl4f \
+      --with //src/sys/bin/start_sl4f \
+      --args='core_realm_shards += [ "//src/testing/sl4f:sl4f_core_shard" ]' \
+      --with-host //third_party/antlion:e2e_tests
+   fx build
+   ```
+
+2. Flash your device with the new build
+
+3. In a separate terminal, run a package server
+
+   ```sh
+   fx serve
+   ```
+
+4. Run an antlion test
+
+   ```sh
+   fx test --e2e --output //third_party/antlion/src/antlion/tests/functional:ping_stress_test
+   ```
+
+> Local auxiliary devices are not yet supported by `antlion-runner`, which is
+> responsible for generating Mobly configs. In the meantime, see the
+> section below for manually crafting Mobly configs to support auxiliary
+> devices.
+
+## Running without a Fuchsia checkout
 
 Requires Python 3.8+
 
@@ -25,52 +103,77 @@
 
    ```sh
    cd antlion
-   python3 -m venv .venv  # creates a "virtual environment" in the `.venv` directory
-   source .venv/bin/activate  # activates the virtual environment. Run `deactivate` to exit it later
-   pip install --editable ".[dev,test]"
+   python3 -m venv .venv      # Create a virtual environment in the `.venv` directory
+   source .venv/bin/activate  # Activate the virtual environment
+   pip install --editable ".[mdns]"
+   # Run `deactivate` later to exit the virtual environment
    ```
 
 3. Write the sample config and update the Fuchsia controller to match your
    development environment
 
    ```sh
-   mkdir -p config
-   cat <<EOF > config/simple.json
-   {
-      "testbed": [{
-         "name": "simple_testbed",
-         "FuchsiaDevice": [{
-            "ip": "fuchsia-00e0-4c01-04df"
-         }]
-      }],
-      "logpath": "logs"
-   }
+   cat <<EOF > simple-config.yaml
+   TestBeds:
+   - Name: antlion-runner
+     Controllers:
+       FuchsiaDevice:
+       - ip: fuchsia-00e0-4c01-04df
+   MoblyParams:
+     LogPath: logs
    EOF
    ```
 
+   Replace `fuchsia-00e0-4c01-04df` with your device's nodename, or
+   `fuchsia-emulator` if using an emulator. The nodename can be found by looking
+   for a log similar to the one below.
+
+   ```text
+   [0.524][klog][klog][I] netsvc: nodename='fuchsia-emulator'
+   ```
+
 4. Run the sanity test
 
    ```sh
-   antlion -c config/simple.json -tc Sl4fSanityTest
+   python src/antlion/tests/examples/Sl4fSanityTest.py -c simple-config.yaml
    ```
 
-See `antlion -h` for more full usage.
-
 ## Contributing
 
-Contributions are what make open source a great place to learn, inspire, and
-create. Any contributions you make are **greatly appreciated**.
+Contributions are what make open source projects a great place to learn,
+inspire, and create. Any contributions you make are **greatly appreciated**.
+If you have a suggestion that would make this better, please create a CL.
 
-If you have a suggestion that would make this better, please create a pull
-request.
+Before contributing, additional setup is necessary:
 
-1. Create a feature branch (`git checkout -b feature/amazing-feature`)
-2. Document your change in `CHANGELOG.md`
-3. Commit changes (`git commit -m 'Add some amazing feature'`)
-4. Upload CL (`git push origin HEAD:refs/for/main`)
+- Install developer Python packages for formatting and linting
+
+  ```sh
+  pip install --editable ".[dev]"
+  ```
+
+- Install an [EditorConfig](https://editorconfig.org/) plugin for consistent
+  whitespace
+
+- Install [Black](https://pypi.org/project/black/), our preferred code formatter.
+  Optionally, add the extension to your editor.
+
+- Complete the steps in '[Contribute source changes]' to gain authorization to
+  upload CLs to Fuchsia's Gerrit.
+
+To create a CL:
+
+1. Create a branch (`git checkout -b feature/amazing-feature`)
+2. Make changes
+3. Document the changes in `CHANGELOG.md`
+4. Run your change through `Black` formatter
+5. Commit changes (`git add . && git commit -m 'Add some amazing feature'`)
+6. Upload CL (`git push origin HEAD:refs/for/main`)
 
 > A public bug tracker is not (yet) available.
 
+[Contribute source changes]: https://fuchsia.dev/fuchsia-src/development/source_code/contribute_changes#prerequisites
+
 ### Recommended git aliases
 
 There are a handful of git commands that will be commonly used throughout the
@@ -87,6 +190,13 @@
   uc = push origin HEAD:refs/for/main%l=Commit-Queue+1,l=Fuchsia-Auto-Submit+1,publish-comments,r=sbalana
 ```
 
+You may also want to add a section to ignore the project's large formatting changes:
+
+```gitconfig
+[blame]
+  ignoreRevsFile = .git-blame-ignore-revs
+```
+
 ## License
 
 Distributed under the Apache 2.0 License. See `LICENSE` for more information.
diff --git a/antlion_host_test.gni b/antlion_host_test.gni
new file mode 100644
index 0000000..96f7654
--- /dev/null
+++ b/antlion_host_test.gni
@@ -0,0 +1,159 @@
+import("//build/python/python_binary.gni")
+import("//build/rust/rustc_binary.gni")
+import("//build/testing/host_test.gni")
+import("//build/testing/host_test_data.gni")
+
+# Declares a host-side antlion test.
+#
+# Example
+#
+# ```
+# antlion_host_test("Sl4fSanityTest") {
+#   main_source = "Sl4fSanityTest.py"
+# }
+# ```
+#
+# Parameters
+#
+#  main_source
+#    The .py file defining the antlion test.
+#    Type: path
+#
+#  sources (optional)
+#    Other files that are used in the test.
+#    Type: list(path)
+#    Default: empty list
+#
+#  test_params (optional)
+#    Path to a YAML file with additional test parameters. This will be provided
+#    to the test in the antlion config under the "test_params" key.
+#    Type: string
+#
+#  extra_args (optional)
+#    Additional arguments to pass to the test.
+#    Type: list(string)
+#
+#   deps
+#   environments
+#   visibility
+template("antlion_host_test") {
+  assert(defined(invoker.main_source), "main_source is required")
+
+  #
+  # Define antlion test python_binary().
+  #
+  _python_binary_name = "${target_name}.pyz"
+  _python_binary_target = "${target_name}_python_binary"
+  python_binary(_python_binary_target) {
+    forward_variables_from(invoker,
+                           [
+                             "main_source",
+                             "sources",
+                           ])
+    output_name = _python_binary_name
+    main_callable = "test_runner.main" # Mobly-specific entry point.
+    deps = [ "//third_party/antlion" ]
+    testonly = true
+    visibility = [ ":*" ]
+  }
+
+  _test_dir = "${root_out_dir}/test_data/" + get_label_info(target_name, "dir")
+
+  #
+  # Define antlion test host_test_data().
+  #
+  _host_test_data_target = "${target_name}_test_data"
+  host_test_data(_host_test_data_target) {
+    testonly = true
+    visibility = [ ":*" ]
+    sources = [ get_label_info(":${_python_binary_target}", "target_out_dir") +
+                "/${_python_binary_name}" ]
+    outputs = [ "${_test_dir}/${_python_binary_name}" ]
+    deps = [ ":${_python_binary_target}" ]
+    if (defined(invoker.deps)) {
+      deps += invoker.deps
+    }
+  }
+
+  #
+  # Define SSH binary host_test_data().
+  #
+  _host_test_data_ssh = "${target_name}_test_data_ssh"
+  host_test_data(_host_test_data_ssh) {
+    testonly = true
+    visibility = [ ":*" ]
+    sources = [ "//prebuilt/third_party/openssh-portable/${host_os}-${host_cpu}/bin/ssh" ]
+    outputs = [ "${_test_dir}/ssh" ]
+  }
+
+  #
+  # Define Mobly test params YAML host_test_data().
+  #
+  if (defined(invoker.test_params)) {
+    _host_test_data_test_params = "${target_name}_test_data_test_params"
+    host_test_data(_host_test_data_test_params) {
+      testonly = true
+      visibility = [ ":*" ]
+      sources = [ invoker.test_params ]
+      outputs = [ "${_test_dir}/${invoker.test_params}" ]
+    }
+  }
+
+  #
+  # Define FFX binary host_test_data().
+  #
+  _host_test_data_ffx = "${target_name}_test_data_ffx"
+  host_test_data(_host_test_data_ffx) {
+    testonly = true
+    visibility = [ ":*" ]
+    sources = [ get_label_info("//src/developer/ffx", "root_out_dir") + "/ffx" ]
+    outputs = [ "${_test_dir}/ffx" ]
+    deps = [ "//src/developer/ffx:ffx_bin($host_toolchain)" ]
+  }
+
+  #
+  # Define the antlion host_test() using antlion-runner.
+  #
+  host_test(target_name) {
+    forward_variables_from(invoker,
+                           [
+                             "environments",
+                             "visibility",
+                           ])
+
+    binary_path = "${root_out_dir}/antlion-runner"
+
+    args = [
+      "--python-bin",
+      rebase_path(python_exe_src, root_build_dir),
+      "--antlion-pyz",
+      rebase_path("${_test_dir}/${_python_binary_name}", root_build_dir),
+      "--out-dir",
+      rebase_path("${_test_dir}", root_build_dir),
+      "--ffx-binary",
+      rebase_path("${_test_dir}/ffx", root_build_dir),
+      "--ssh-binary",
+      rebase_path("${_test_dir}/ssh", root_build_dir),
+    ]
+
+    deps = [
+      ":${_host_test_data_ffx}",
+      ":${_host_test_data_ssh}",
+      ":${_host_test_data_target}",
+      "//build/python:interpreter",
+      "//third_party/antlion/runner",
+    ]
+
+    if (defined(invoker.test_params)) {
+      args += [
+        "--test-params",
+        rebase_path("${_test_dir}/${invoker.test_params}", root_build_dir),
+      ]
+      deps += [ ":${_host_test_data_test_params}" ]
+    }
+
+    if (defined(invoker.extra_args)) {
+      args += invoker.extra_args
+    }
+  }
+}
diff --git a/environments.gni b/environments.gni
new file mode 100644
index 0000000..2bdfb53
--- /dev/null
+++ b/environments.gni
@@ -0,0 +1,141 @@
+# Copyright 2023 The Fuchsia Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+antlion_astro_env = {
+  dimensions = {
+    device_type = "Astro"
+    pool = "fuchsia.tests.connectivity"
+  }
+  tags = [ "antlion" ]
+}
+
+antlion_sherlock_env = {
+  dimensions = {
+    device_type = "Sherlock"
+    pool = "fuchsia.tests.connectivity"
+  }
+  tags = [ "antlion" ]
+}
+
+antlion_nelson_env = {
+  dimensions = {
+    device_type = "Nelson"
+    pool = "fuchsia.tests.connectivity"
+  }
+  tags = [ "antlion" ]
+}
+
+astro_ap_env = {
+  dimensions = {
+    access_points = "1"
+    device_type = "Astro"
+    pool = "fuchsia.tests.connectivity"
+  }
+  tags = [ "antlion" ]
+}
+
+astro_ap_iperf_env = {
+  dimensions = {
+    access_points = "1"
+    device_type = "Astro"
+    iperf_servers = "1"
+    pool = "fuchsia.tests.connectivity"
+  }
+  tags = [ "antlion" ]
+}
+
+astro_ap_iperf_attenuator_env = {
+  dimensions = {
+    access_points = "1"
+    attenuators = "1"
+    device_type = "Astro"
+    iperf_servers = "1"
+    pool = "fuchsia.tests.connectivity"
+  }
+  tags = [ "antlion" ]
+}
+
+sherlock_ap_env = {
+  dimensions = {
+    access_points = "1"
+    device_type = "Sherlock"
+    pool = "fuchsia.tests.connectivity"
+  }
+  tags = [ "antlion" ]
+}
+
+sherlock_ap_iperf_env = {
+  dimensions = {
+    access_points = "1"
+    device_type = "Sherlock"
+    iperf_servers = "1"
+    pool = "fuchsia.tests.connectivity"
+  }
+  tags = [ "antlion" ]
+}
+
+sherlock_ap_iperf_attenuator_env = {
+  dimensions = {
+    access_points = "1"
+    attenuators = "1"
+    device_type = "Sherlock"
+    iperf_servers = "1"
+    pool = "fuchsia.tests.connectivity"
+  }
+  tags = [ "antlion" ]
+}
+
+nelson_ap_env = {
+  dimensions = {
+    access_points = "1"
+    device_type = "Nelson"
+    pool = "fuchsia.tests.connectivity"
+  }
+  tags = [ "antlion" ]
+}
+
+nelson_ap_iperf_env = {
+  dimensions = {
+    access_points = "1"
+    device_type = "Nelson"
+    iperf_servers = "1"
+    pool = "fuchsia.tests.connectivity"
+  }
+  tags = [ "antlion" ]
+}
+
+nelson_ap_iperf_attenuator_env = {
+  dimensions = {
+    access_points = "1"
+    attenuators = "1"
+    device_type = "Nelson"
+    iperf_servers = "1"
+    pool = "fuchsia.tests.connectivity"
+  }
+  tags = [ "antlion" ]
+}
+
+display_envs = [
+  antlion_astro_env,
+  antlion_sherlock_env,
+  antlion_nelson_env,
+]
+
+display_ap_envs = [
+  astro_ap_env,
+  sherlock_ap_env,
+  nelson_ap_env,
+]
+
+display_ap_iperf_envs = [
+  astro_ap_iperf_env,
+  sherlock_ap_iperf_env,
+  nelson_ap_iperf_env,
+]
+
+display_ap_iperf_attenuator_envs = [
+  astro_ap_iperf_attenuator_env,
+  sherlock_ap_iperf_attenuator_env,
+  nelson_ap_iperf_attenuator_env,
+]
diff --git a/pyproject.toml b/pyproject.toml
index c0fa915..b385122 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -20,25 +20,19 @@
 [project.optional-dependencies]
 # Required to support development tools
 dev = [
+    "mock",    # required for unit tests
     "shiv",    # packaging python
     "toml",    # required for pyproject.toml
     "vulture", # finds unused code
-    "yapf",    # code formatting
+    "black",   # code formatting
 ]
 digital_loggers_pdu = ["dlipower"]
-bluetooth = ["soundfile"]
 html_graphing = ["bokeh"]
 flash = ["usbinfo"]
 mdns = ["psutil", "zeroconf"]
 android = [
-    "Monsoon",
     "numpy",
-    "paramiko[ed25519]",
-    "pylibftdi",
-    "pyserial",
-    "requests",
     "scapy",
-    "scipy",
 ]
 
 [project.scripts]
@@ -49,19 +43,13 @@
 
 [tool.autoflake]
 imports = [
-    "Monsoon",
     "antlion",
     "dataclasses",
     "dlipower",
     "mobly",
     "mock",
     "numpy",
-    "paramiko",
-    "protobuf",
-    "pylibftdi",
-    "requests",
     "scapy",
-    "scipy",
     "tenacity",
     "usbinfo",
     "zeroconf",
diff --git a/runner/BUILD.gn b/runner/BUILD.gn
new file mode 100644
index 0000000..d405592
--- /dev/null
+++ b/runner/BUILD.gn
@@ -0,0 +1,50 @@
+import("//build/rust/rustc_binary.gni")
+import("//build/rust/rustc_test.gni")
+
+rustc_binary("runner") {
+  output_name = "antlion-runner"
+  edition = "2021"
+  with_unit_tests = true
+
+  deps = [
+    "//src/developer/ffx/lib/netext:lib($host_toolchain)",
+    "//src/lib/mdns/rust:mdns",
+    "//src/lib/network/packet",
+    "//third_party/rust_crates:anyhow",
+    "//third_party/rust_crates:argh",
+    "//third_party/rust_crates:home",
+    "//third_party/rust_crates:itertools",
+    "//third_party/rust_crates:lazy_static",
+    "//third_party/rust_crates:nix",
+    "//third_party/rust_crates:serde",
+    "//third_party/rust_crates:serde_json",
+    "//third_party/rust_crates:serde_yaml",
+    "//third_party/rust_crates:socket2",
+    "//third_party/rust_crates:thiserror",
+  ]
+
+  test_deps = [
+    "//third_party/rust_crates:assert_matches",
+    "//third_party/rust_crates:indoc",
+    "//third_party/rust_crates:pretty_assertions",
+    "//third_party/rust_crates:tempfile",
+  ]
+
+  sources = [
+    "src/config.rs",
+    "src/driver/infra.rs",
+    "src/driver/local.rs",
+    "src/driver/mod.rs",
+    "src/env.rs",
+    "src/finder.rs",
+    "src/main.rs",
+    "src/net.rs",
+    "src/runner.rs",
+    "src/yaml.rs",
+  ]
+}
+
+group("tests") {
+  testonly = true
+  deps = [ ":runner_test($host_toolchain)" ]
+}
diff --git a/runner/README.md b/runner/README.md
new file mode 100644
index 0000000..45c926a
--- /dev/null
+++ b/runner/README.md
@@ -0,0 +1,42 @@
+# antlion-runner
+
+A program to run antlion locally and in infrastructure. Includes a config
+generator with mDNS discovery and sensible defaults.
+
+## Using locally with an emulator
+
+Running antlion locally with a Fuchsia emulator allows developers to perform
+sanity checks on their changes. Running this way is very quick (~5 seconds) and
+can spot simple mistakes before code review!
+
+1. Build Fuchsia with antlion support
+   ```sh
+   jiri update -gc # if you haven't updated in a while
+   fx set workstation_eng_paused.qemu-x64 \
+      --with-host //third_party/antlion:e2e_tests \
+      --with-host //third_party/antlion:tests \
+      --with //src/testing/sl4f
+   fx build # if you haven't built in a while
+   ```
+2. Start the package server. Keep this running in the background.
+   ```sh
+   fx serve
+   ```
+3. In a separate terminal, start the emulator with access to external networks.
+   ```sh
+   ffx emu stop && ffx emu start -H --net tap && ffx log
+   ```
+4. In a separate terminal, run a test
+   ```sh
+   fx test --e2e --output //third_party/antlion:sl4f_sanity_test
+   ```
+
+> Note: Local testing with auxiliary devices is not supported by antlion runner.
+Use antlion directly for these cases; see the antlion [README](../README.md).
+
+## Testing
+
+```sh
+fx set core.qemu-x64 --with //vendor/google/build/python/antlion/runner:tests
+fx test --output //vendor/google/build/python/antlion/runner:runner_test
+```
diff --git a/runner/src/config.rs b/runner/src/config.rs
new file mode 100644
index 0000000..571a8ab
--- /dev/null
+++ b/runner/src/config.rs
@@ -0,0 +1,162 @@
+// Copyright 2022 The Fuchsia Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+use crate::net::IpAddr;
+use crate::yaml;
+
+use std::path::PathBuf;
+
+use serde::{Deserialize, Serialize};
+use serde_yaml::Value;
+
+#[derive(Clone, Debug, Serialize)]
+#[serde(rename_all = "PascalCase")]
+/// Config used by antlion for declaring testbeds and test parameters.
+pub(crate) struct Config {
+    #[serde(rename = "TestBeds")]
+    pub testbeds: Vec<Testbed>,
+    pub mobly_params: MoblyParams,
+}
+
+impl Config {
+    /// Merge the given test parameters into all testbeds.
+    pub fn merge_test_params(&mut self, test_params: Value) {
+        for testbed in self.testbeds.iter_mut() {
+            match testbed.test_params.as_mut() {
+                Some(existing) => yaml::merge(existing, test_params.clone()),
+                None => testbed.test_params = Some(test_params.clone()),
+            }
+        }
+    }
+}
+
+#[derive(Clone, Debug, Serialize)]
+#[serde(rename_all = "PascalCase")]
+/// Parameters consumed by Mobly.
+pub(crate) struct MoblyParams {
+    pub log_path: PathBuf,
+}
+
+#[derive(Clone, Debug, Serialize)]
+#[serde(rename_all = "PascalCase")]
+/// A group of interconnected devices to be used together during an antlion test.
+pub(crate) struct Testbed {
+    pub name: String,
+    pub controllers: Controllers,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub test_params: Option<Value>,
+}
+
+#[derive(Clone, Debug, Default, Serialize)]
+pub(crate) struct Controllers {
+    #[serde(rename = "FuchsiaDevice", skip_serializing_if = "Vec::is_empty")]
+    pub fuchsia_devices: Vec<Fuchsia>,
+    #[serde(rename = "AccessPoint", skip_serializing_if = "Vec::is_empty")]
+    pub access_points: Vec<AccessPoint>,
+    #[serde(rename = "Attenuator", skip_serializing_if = "Vec::is_empty")]
+    pub attenuators: Vec<Attenuator>,
+    #[serde(rename = "PduDevice", skip_serializing_if = "Vec::is_empty")]
+    pub pdus: Vec<Pdu>,
+    #[serde(rename = "IPerfServer", skip_serializing_if = "Vec::is_empty")]
+    pub iperf_servers: Vec<IPerfServer>,
+}
+
+#[derive(Clone, Debug, Serialize)]
+/// A Fuchsia device for use with antlion as defined by [fuchsia_device.py].
+///
+/// [fuchsia_device.py]: https://cs.opensource.google/fuchsia/fuchsia/+/main:third_party/antlion/src/antlion/controllers/fuchsia_device.py
+pub(crate) struct Fuchsia {
+    pub mdns_name: String,
+    pub ip: IpAddr,
+    pub take_bug_report_on_fail: bool,
+    pub ssh_binary_path: PathBuf,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub ssh_config: Option<PathBuf>,
+    pub ffx_binary_path: PathBuf,
+    pub ssh_priv_key: PathBuf,
+    #[serde(rename = "PduDevice", skip_serializing_if = "Option::is_none")]
+    pub pdu_device: Option<PduRef>,
+    pub hard_reboot_on_fail: bool,
+}
+
+#[derive(Clone, Debug, Serialize, Deserialize)]
+/// Reference to a PDU device. Used to specify which port the attached device
+/// maps to on the PDU.
+pub(crate) struct PduRef {
+    #[serde(default = "default_pdu_device")]
+    pub device: String,
+    #[serde(rename(serialize = "host"))]
+    pub ip: IpAddr,
+    pub port: u8,
+}
+
+fn default_pdu_device() -> String {
+    "synaccess.np02b".to_string()
+}
+
+#[derive(Clone, Debug, Serialize)]
+/// Declares an access point for use with antlion as defined by [access_point.py].
+///
+/// [access_point.py]: https://cs.opensource.google/fuchsia/fuchsia/+/main:third_party/antlion/src/antlion/controllers/access_point.py
+pub(crate) struct AccessPoint {
+    pub wan_interface: String,
+    pub ssh_config: SshConfig,
+    #[serde(rename = "PduDevice", skip_serializing_if = "Option::is_none")]
+    pub pdu_device: Option<PduRef>,
+    #[serde(rename = "Attenuator", skip_serializing_if = "Option::is_none")]
+    pub attenuators: Option<Vec<AttenuatorRef>>,
+}
+
+#[derive(Clone, Debug, Serialize)]
+pub(crate) struct SshConfig {
+    pub ssh_binary_path: PathBuf,
+    pub host: IpAddr,
+    pub user: String,
+    pub identity_file: PathBuf,
+}
+
+#[derive(Clone, Debug, Serialize)]
+/// Reference to an attenuator device. Used to specify which ports the attached
+/// devices' channels map to on the attenuator.
+pub(crate) struct AttenuatorRef {
+    #[serde(rename = "Address")]
+    pub address: IpAddr,
+    #[serde(rename = "attenuator_ports_wifi_2g")]
+    pub ports_2g: Vec<u8>,
+    #[serde(rename = "attenuator_ports_wifi_5g")]
+    pub ports_5g: Vec<u8>,
+}
+
+#[derive(Clone, Debug, Eq, PartialEq, Serialize)]
+#[serde(rename_all = "PascalCase")]
+/// Declares an attenuator for use with antlion as defined by [attenuator.py].
+///
+/// [attenuator.py]: https://cs.opensource.google/fuchsia/fuchsia/+/main:third_party/antlion/src/antlion/controllers/attenuator.py
+pub(crate) struct Attenuator {
+    pub model: String,
+    pub instrument_count: u8,
+    pub address: IpAddr,
+    pub protocol: String,
+    pub port: u16,
+}
+
+#[derive(Clone, Debug, Eq, PartialEq, Serialize)]
+/// Declares a power distribution unit for use with antlion as defined by [pdu.py].
+///
+/// [pdu.py]: https://cs.opensource.google/fuchsia/fuchsia/+/main:third_party/antlion/src/antlion/controllers/pdu.py
+pub(crate) struct Pdu {
+    pub device: String,
+    pub host: IpAddr,
+}
+
+#[derive(Clone, Debug, Serialize)]
+/// Declares an iPerf3 server for use with antlion as defined by [iperf_server.py].
+///
+/// [iperf_server.py]: https://cs.opensource.google/fuchsia/fuchsia/+/main:third_party/antlion/src/antlion/controllers/iperf_server.py
+pub(crate) struct IPerfServer {
+    pub ssh_config: SshConfig,
+    pub port: u16,
+    pub test_interface: String,
+    pub use_killall: bool,
+}
diff --git a/runner/src/driver/infra.rs b/runner/src/driver/infra.rs
new file mode 100644
index 0000000..ceff26e
--- /dev/null
+++ b/runner/src/driver/infra.rs
@@ -0,0 +1,898 @@
+// Copyright 2023 The Fuchsia Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+use crate::config::PduRef;
+use crate::config::{self, Config};
+use crate::driver::Driver;
+use crate::env::Environment;
+use crate::net::IpAddr;
+use crate::yaml;
+
+use std::collections::{HashMap, HashSet};
+use std::fs;
+use std::path::{Path, PathBuf};
+
+use anyhow::{anyhow, Context, Result};
+use itertools::Itertools;
+use serde::Deserialize;
+use serde_yaml::Value;
+use thiserror::Error;
+
+const TESTBED_NAME: &'static str = "antlion-runner";
+const ENV_OUT_DIR: &'static str = "FUCHSIA_TEST_OUTDIR";
+const ENV_TESTBED_CONFIG: &'static str = "FUCHSIA_TESTBED_CONFIG";
+const TEST_SUMMARY_FILE: &'static str = "test_summary.yaml";
+
+#[derive(Debug)]
+/// Driver for running antlion on emulated and hardware testbeds hosted by
+/// Fuchsia infrastructure.
+pub(crate) struct InfraDriver {
+    output_dir: PathBuf,
+    config: Config,
+}
+
+#[derive(Error, Debug)]
+pub(crate) enum InfraDriverError {
+    #[error("infra environment not detected, \"{0}\" environment variable not present")]
+    NotDetected(String),
+    #[error(transparent)]
+    Config(#[from] ConfigError),
+    #[error(transparent)]
+    Other(#[from] anyhow::Error),
+}
+
+#[derive(Error, Debug)]
+pub(crate) enum ConfigError {
+    #[error("ip {ip} in use by several devices")]
+    DuplicateIp { ip: IpAddr },
+    #[error("ip {ip} port {port} in use by several devices")]
+    DuplicatePort { ip: IpAddr, port: u8 },
+}
+
+impl InfraDriver {
+    /// Detect an InfraDriver. Returns None if the required environmental
+    /// variables are not found.
+    pub fn new<E: Environment>(
+        env: E,
+        ssh_binary: PathBuf,
+        ffx_binary: PathBuf,
+    ) -> Result<Self, InfraDriverError> {
+        let config_path = match env.var(ENV_TESTBED_CONFIG) {
+            Ok(p) => PathBuf::from(p),
+            Err(std::env::VarError::NotPresent) => {
+                return Err(InfraDriverError::NotDetected(ENV_TESTBED_CONFIG.to_string()))
+            }
+            Err(e) => {
+                return Err(InfraDriverError::Other(anyhow!(
+                    "Failed to read \"{ENV_TESTBED_CONFIG}\" {e}"
+                )))
+            }
+        };
+        let config = fs::read_to_string(&config_path)
+            .with_context(|| format!("Failed to read \"{}\"", config_path.display()))?;
+        let targets: Vec<InfraTarget> = serde_json::from_str(&config)
+            .with_context(|| format!("Failed to parse into InfraTarget: \"{config}\""))?;
+        if targets.len() == 0 {
+            return Err(InfraDriverError::Other(anyhow!(
+                "Expected at least one target declared in \"{}\"",
+                config_path.display()
+            )));
+        }
+
+        let output_path = match env.var(ENV_OUT_DIR) {
+            Ok(p) => p,
+            Err(std::env::VarError::NotPresent) => {
+                return Err(InfraDriverError::NotDetected(ENV_OUT_DIR.to_string()))
+            }
+            Err(e) => {
+                return Err(InfraDriverError::Other(anyhow!(
+                    "Failed to read \"{ENV_OUT_DIR}\" {e}"
+                )))
+            }
+        };
+        let output_dir = PathBuf::from(output_path);
+        if !fs::metadata(&output_dir).context("Failed to stat the output directory")?.is_dir() {
+            return Err(InfraDriverError::Other(anyhow!(
+                "Expected a directory but found a file at \"{}\"",
+                output_dir.display()
+            )));
+        }
+
+        Ok(InfraDriver {
+            output_dir: output_dir.clone(),
+            config: InfraDriver::parse_targets(targets, ssh_binary, ffx_binary, output_dir)?,
+        })
+    }
+
+    fn parse_targets(
+        targets: Vec<InfraTarget>,
+        ssh_binary: PathBuf,
+        ffx_binary: PathBuf,
+        output_dir: PathBuf,
+    ) -> Result<Config, InfraDriverError> {
+        let mut fuchsia_devices: Vec<config::Fuchsia> = vec![];
+        let mut access_points: Vec<config::AccessPoint> = vec![];
+        let mut attenuators: HashMap<IpAddr, config::Attenuator> = HashMap::new();
+        let mut pdus: HashMap<IpAddr, config::Pdu> = HashMap::new();
+        let mut iperf_servers: Vec<config::IPerfServer> = vec![];
+        let mut test_params: Option<Value> = None;
+
+        let mut used_ips: HashSet<IpAddr> = HashSet::new();
+        let mut used_ports: HashMap<IpAddr, HashSet<u8>> = HashMap::new();
+
+        let mut register_ip = |ip: IpAddr| -> Result<(), InfraDriverError> {
+            if !used_ips.insert(ip.clone()) {
+                return Err(ConfigError::DuplicateIp { ip }.into());
+            }
+            Ok(())
+        };
+
+        let mut register_port = |ip: IpAddr, port: u8| -> Result<(), InfraDriverError> {
+            match used_ports.get_mut(&ip) {
+                Some(ports) => {
+                    if !ports.insert(port) {
+                        return Err(ConfigError::DuplicatePort { ip, port }.into());
+                    }
+                }
+                None => {
+                    if used_ports.insert(ip, HashSet::from([port])).is_some() {
+                        return Err(InfraDriverError::Other(anyhow!(
+                            "Used ports set was unexpectedly modified by concurrent use",
+                        )));
+                    }
+                }
+            };
+            Ok(())
+        };
+
+        let mut register_pdu = |p: Option<PduRef>| -> Result<(), InfraDriverError> {
+            if let Some(PduRef { device, ip, port }) = p {
+                register_port(ip.clone(), port)?;
+                let new = config::Pdu { device: device.clone(), host: ip.clone() };
+                if let Some(old) = pdus.insert(ip.clone(), new.clone()) {
+                    if old != new {
+                        return Err(ConfigError::DuplicateIp { ip }.into());
+                    }
+                }
+            }
+            Ok(())
+        };
+
+        let mut register_attenuator = |a: Option<AttenuatorRef>| -> Result<(), InfraDriverError> {
+            if let Some(a) = a {
+                let new = config::Attenuator {
+                    model: "minicircuits".to_string(),
+                    instrument_count: 4,
+                    address: a.ip.clone(),
+                    protocol: "http".to_string(),
+                    port: 80,
+                };
+                if let Some(old) = attenuators.insert(a.ip.clone(), new.clone()) {
+                    if old != new {
+                        return Err(ConfigError::DuplicateIp { ip: a.ip }.into());
+                    }
+                }
+            }
+            Ok(())
+        };
+
+        let mut merge_test_params = |p: Option<Value>| {
+            match (test_params.as_mut(), p) {
+                (None, Some(new)) => test_params = Some(new),
+                (Some(existing), Some(new)) => yaml::merge(existing, new),
+                (_, None) => {}
+            };
+        };
+
+        for target in targets {
+            match target {
+                InfraTarget::FuchsiaDevice { nodename, ipv4, ipv6, ssh_key, pdu, test_params } => {
+                    let ip: IpAddr = if !ipv4.is_empty() {
+                        ipv4.parse().context("Invalid IPv4 address")
+                    } else if !ipv6.is_empty() {
+                        ipv6.parse().context("Invalid IPv6 address")
+                    } else {
+                        Err(anyhow!("IP address not specified"))
+                    }?;
+
+                    fuchsia_devices.push(config::Fuchsia {
+                        mdns_name: nodename.clone(),
+                        ip: ip.clone(),
+                        take_bug_report_on_fail: true,
+                        ssh_binary_path: ssh_binary.clone(),
+                        // TODO(http://b/244747218): Remove when ssh_config is refactored away
+                        ssh_config: None,
+                        ffx_binary_path: ffx_binary.clone(),
+                        ssh_priv_key: ssh_key.clone(),
+                        pdu_device: pdu.clone(),
+                        hard_reboot_on_fail: true,
+                    });
+
+                    register_ip(ip)?;
+                    register_pdu(pdu)?;
+                    merge_test_params(test_params);
+                }
+                InfraTarget::AccessPoint { ip, attenuator, pdu, ssh_key } => {
+                    access_points.push(config::AccessPoint {
+                        wan_interface: "eth0".to_string(),
+                        ssh_config: config::SshConfig {
+                            ssh_binary_path: ssh_binary.clone(),
+                            host: ip.clone(),
+                            user: "root".to_string(),
+                            identity_file: ssh_key.clone(),
+                        },
+                        pdu_device: pdu.clone(),
+                        attenuators: attenuator.as_ref().map(|a| {
+                            vec![config::AttenuatorRef {
+                                address: a.ip.clone(),
+                                ports_2g: vec![1, 2, 3],
+                                ports_5g: vec![1, 2, 3],
+                            }]
+                        }),
+                    });
+
+                    register_ip(ip)?;
+                    register_pdu(pdu)?;
+                    register_attenuator(attenuator)?;
+                }
+                InfraTarget::IPerfServer { ip, user, test_interface, pdu, ssh_key } => {
+                    iperf_servers.push(config::IPerfServer {
+                        ssh_config: config::SshConfig {
+                            ssh_binary_path: ssh_binary.clone(),
+                            host: ip.clone(),
+                            user: user.to_string(),
+                            identity_file: ssh_key.clone(),
+                        },
+                        port: 5201,
+                        test_interface: test_interface.clone(),
+                        use_killall: true,
+                    });
+
+                    register_ip(ip.clone())?;
+                    register_pdu(pdu)?;
+                }
+            };
+        }
+
+        Ok(Config {
+            testbeds: vec![config::Testbed {
+                name: TESTBED_NAME.to_string(),
+                controllers: config::Controllers {
+                    fuchsia_devices: fuchsia_devices,
+                    access_points: access_points,
+                    attenuators: attenuators
+                        .into_values()
+                        .sorted_by_key(|a| a.address.clone())
+                        .collect(),
+                    pdus: pdus.into_values().sorted_by_key(|p| p.host.clone()).collect(),
+                    iperf_servers: iperf_servers,
+                },
+                test_params,
+            }],
+            mobly_params: config::MoblyParams { log_path: output_dir },
+        })
+    }
+}
+
+impl Driver for InfraDriver {
+    fn output_path(&self) -> &Path {
+        self.output_dir.as_path()
+    }
+    fn config(&self) -> Config {
+        self.config.clone()
+    }
+    fn teardown(&self) -> Result<()> {
+        let results_path =
+            self.output_dir.join(TESTBED_NAME).join("latest").join(TEST_SUMMARY_FILE);
+        match fs::File::open(&results_path) {
+            Ok(mut results) => {
+                println!("\nTest results from {}\n", results_path.display());
+                println!("[=====MOBLY RESULTS=====]");
+                std::io::copy(&mut results, &mut std::io::stdout())
+                    .context("Failed to copy results to stdout")?;
+            }
+            Err(e) => eprintln!("Failed to open \"{}\": {}", results_path.display(), e),
+        };
+
+        // Remove any symlinks from the output directory; this causes errors
+        // while uploading to CAS.
+        //
+        // TODO: Remove when the fix is released and supported on Swarming bots
+        // https://github.com/bazelbuild/remote-apis-sdks/pull/229.
+        remove_symlinks(self.output_dir.clone())?;
+
+        Ok(())
+    }
+}
+
+fn remove_symlinks<P: AsRef<Path>>(path: P) -> Result<()> {
+    let meta = fs::symlink_metadata(path.as_ref())?;
+    if meta.is_symlink() {
+        fs::remove_file(path)?;
+    } else if meta.is_dir() {
+        for entry in fs::read_dir(path)? {
+            remove_symlinks(entry?.path())?;
+        }
+    }
+    Ok(())
+}
+
+#[derive(Debug, Deserialize)]
+#[serde(tag = "type")]
+/// Schema used to communicate target information from the test environment set
+/// up by botanist.
+///
+/// See https://cs.opensource.google/fuchsia/fuchsia/+/main:tools/botanist/README.md
+enum InfraTarget {
+    FuchsiaDevice {
+        nodename: String,
+        ipv4: String,
+        ipv6: String,
+        ssh_key: PathBuf,
+        pdu: Option<PduRef>,
+        test_params: Option<Value>,
+    },
+    AccessPoint {
+        ip: IpAddr,
+        ssh_key: PathBuf,
+        attenuator: Option<AttenuatorRef>,
+        pdu: Option<PduRef>,
+    },
+    IPerfServer {
+        ip: IpAddr,
+        ssh_key: PathBuf,
+        #[serde(default = "default_iperf_user")]
+        user: String,
+        test_interface: String,
+        pdu: Option<PduRef>,
+    },
+}
+
+fn default_iperf_user() -> String {
+    "pi".to_string()
+}
+
+#[derive(Clone, Debug, Deserialize)]
+struct AttenuatorRef {
+    ip: IpAddr,
+}
+
+#[cfg(test)]
+mod test {
+    use super::*;
+
+    use crate::run;
+    use crate::runner::Runner;
+    use crate::{env::Environment, runner::ExitStatus};
+
+    use std::ffi::OsStr;
+
+    use assert_matches::assert_matches;
+    use indoc::formatdoc;
+    use pretty_assertions::assert_eq;
+    use serde_json::json;
+    use tempfile::{NamedTempFile, TempDir};
+
+    const FUCHSIA_NAME: &'static str = "fuchsia-1234-5678-9abc";
+    const FUCHSIA_ADDR: &'static str = "fe80::1%2";
+
+    #[derive(Default)]
+    struct MockRunner {
+        out_dir: PathBuf,
+        config: std::cell::Cell<PathBuf>,
+    }
+    impl MockRunner {
+        fn new(out_dir: PathBuf) -> Self {
+            Self { out_dir, ..Default::default() }
+        }
+    }
+    impl Runner for MockRunner {
+        fn run(&self, config: PathBuf) -> Result<ExitStatus> {
+            self.config.set(config);
+
+            let antlion_out = self.out_dir.join(TESTBED_NAME).join("latest");
+            fs::create_dir_all(&antlion_out)
+                .context("Failed to create antlion output directory")?;
+            fs::write(antlion_out.join(TEST_SUMMARY_FILE), "")
+                .context("Failed to write test_summary.yaml")?;
+            Ok(ExitStatus::Ok)
+        }
+    }
+
+    struct MockEnvironment {
+        config: Option<PathBuf>,
+        out_dir: Option<PathBuf>,
+    }
+    impl Environment for MockEnvironment {
+        fn var<K: AsRef<OsStr>>(&self, key: K) -> Result<String, std::env::VarError> {
+            if key.as_ref() == ENV_TESTBED_CONFIG {
+                self.config
+                    .clone()
+                    .ok_or(std::env::VarError::NotPresent)
+                    .map(|p| p.into_os_string().into_string().unwrap())
+            } else if key.as_ref() == ENV_OUT_DIR {
+                self.out_dir
+                    .clone()
+                    .ok_or(std::env::VarError::NotPresent)
+                    .map(|p| p.into_os_string().into_string().unwrap())
+            } else {
+                Err(std::env::VarError::NotPresent)
+            }
+        }
+    }
+
+    #[test]
+    fn infra_not_detected() {
+        let ssh = NamedTempFile::new().unwrap();
+        let ffx = NamedTempFile::new().unwrap();
+        let env = MockEnvironment { config: None, out_dir: None };
+
+        let got = InfraDriver::new(env, ssh.path().to_path_buf(), ffx.path().to_path_buf());
+        assert_matches!(got, Err(InfraDriverError::NotDetected(_)));
+    }
+
+    #[test]
+    fn infra_not_detected_config() {
+        let ssh = NamedTempFile::new().unwrap();
+        let ffx = NamedTempFile::new().unwrap();
+        let out_dir = TempDir::new().unwrap();
+        let env = MockEnvironment { config: None, out_dir: Some(out_dir.path().to_path_buf()) };
+
+        let got = InfraDriver::new(env, ssh.path().to_path_buf(), ffx.path().to_path_buf());
+        assert_matches!(got, Err(InfraDriverError::NotDetected(v)) if v == ENV_TESTBED_CONFIG);
+    }
+
+    #[test]
+    fn infra_not_detected_out_dir() {
+        let ssh = NamedTempFile::new().unwrap();
+        let ssh_key = NamedTempFile::new().unwrap();
+        let ffx = NamedTempFile::new().unwrap();
+
+        let testbed_config = NamedTempFile::new().unwrap();
+        serde_json::to_writer_pretty(
+            testbed_config.as_file(),
+            &json!([{
+                "type": "FuchsiaDevice",
+                "nodename": FUCHSIA_NAME,
+                "ipv4": "",
+                "ipv6": FUCHSIA_ADDR,
+                "ssh_key": ssh_key.path(),
+            }]),
+        )
+        .unwrap();
+
+        let env =
+            MockEnvironment { config: Some(testbed_config.path().to_path_buf()), out_dir: None };
+
+        let got = InfraDriver::new(env, ssh.path().to_path_buf(), ffx.path().to_path_buf());
+        assert_matches!(got, Err(InfraDriverError::NotDetected(v)) if v == ENV_OUT_DIR);
+    }
+
+    #[test]
+    fn infra_invalid_config() {
+        let ssh = NamedTempFile::new().unwrap();
+        let ffx = NamedTempFile::new().unwrap();
+        let out_dir = TempDir::new().unwrap();
+
+        let testbed_config = NamedTempFile::new().unwrap();
+        serde_json::to_writer_pretty(testbed_config.as_file(), &json!({ "foo": "bar" })).unwrap();
+
+        let env = MockEnvironment {
+            config: Some(testbed_config.path().to_path_buf()),
+            out_dir: Some(out_dir.path().to_path_buf()),
+        };
+
+        let got = InfraDriver::new(env, ssh.path().to_path_buf(), ffx.path().to_path_buf());
+        assert_matches!(got, Err(_));
+    }
+
+    #[test]
+    fn infra() {
+        let ssh = NamedTempFile::new().unwrap();
+        let ssh_key = NamedTempFile::new().unwrap();
+        let ffx = NamedTempFile::new().unwrap();
+        let out_dir = TempDir::new().unwrap();
+
+        let testbed_config = NamedTempFile::new().unwrap();
+        serde_json::to_writer_pretty(
+            testbed_config.as_file(),
+            &json!([{
+                "type": "FuchsiaDevice",
+                "nodename": FUCHSIA_NAME,
+                "ipv4": "",
+                "ipv6": FUCHSIA_ADDR,
+                "ssh_key": ssh_key.path(),
+            }]),
+        )
+        .unwrap();
+
+        let runner = MockRunner::new(out_dir.path().to_path_buf());
+        let env = MockEnvironment {
+            config: Some(testbed_config.path().to_path_buf()),
+            out_dir: Some(out_dir.path().to_path_buf()),
+        };
+        let driver =
+            InfraDriver::new(env, ssh.path().to_path_buf(), ffx.path().to_path_buf()).unwrap();
+        run(runner, driver, None).unwrap();
+
+        let got = fs::read_to_string(out_dir.path().join("config.yaml")).unwrap();
+
+        let ssh_path = ssh.path().display().to_string();
+        let ssh_key_path = ssh_key.path().display().to_string();
+        let ffx_path = ffx.path().display().to_string();
+        let out_path = out_dir.path().display();
+        let want = formatdoc! {r#"
+        TestBeds:
+        - Name: {TESTBED_NAME}
+          Controllers:
+            FuchsiaDevice:
+            - mdns_name: {FUCHSIA_NAME}
+              ip: {FUCHSIA_ADDR}
+              take_bug_report_on_fail: true
+              ssh_binary_path: {ssh_path}
+              ffx_binary_path: {ffx_path}
+              ssh_priv_key: {ssh_key_path}
+              hard_reboot_on_fail: true
+        MoblyParams:
+          LogPath: {out_path}
+        "#};
+
+        assert_eq!(got, want);
+    }
+
+    #[test]
+    fn infra_with_test_params() {
+        let ssh = NamedTempFile::new().unwrap();
+        let ssh_key = NamedTempFile::new().unwrap();
+        let ffx = NamedTempFile::new().unwrap();
+        let out_dir = TempDir::new().unwrap();
+
+        let testbed_config = NamedTempFile::new().unwrap();
+        serde_json::to_writer_pretty(
+            testbed_config.as_file(),
+            &json!([{
+                "type": "FuchsiaDevice",
+                "nodename": FUCHSIA_NAME,
+                "ipv4": "",
+                "ipv6": FUCHSIA_ADDR,
+                "ssh_key": ssh_key.path(),
+                "test_params": {
+                    "sl4f_sanity_test_params": {
+                        "can_overwrite": false,
+                        "from_original": true,
+                    }
+                }
+            }]),
+        )
+        .unwrap();
+
+        let runner = MockRunner::new(out_dir.path().to_path_buf());
+        let env = MockEnvironment {
+            config: Some(testbed_config.path().to_path_buf()),
+            out_dir: Some(out_dir.path().to_path_buf()),
+        };
+        let driver =
+            InfraDriver::new(env, ssh.path().to_path_buf(), ffx.path().to_path_buf()).unwrap();
+        let params = "
+            sl4f_sanity_test_params:
+                merged_with: true
+                can_overwrite: true
+        ";
+        let params = serde_yaml::from_str(params).unwrap();
+        run(runner, driver, Some(params)).unwrap();
+
+        let got = fs::read_to_string(out_dir.path().join("config.yaml")).unwrap();
+
+        let ssh_path = ssh.path().display().to_string();
+        let ssh_key_path = ssh_key.path().display().to_string();
+        let ffx_path = ffx.path().display().to_string();
+        let out_path = out_dir.path().display();
+        let want = formatdoc! {r#"
+        TestBeds:
+        - Name: {TESTBED_NAME}
+          Controllers:
+            FuchsiaDevice:
+            - mdns_name: {FUCHSIA_NAME}
+              ip: {FUCHSIA_ADDR}
+              take_bug_report_on_fail: true
+              ssh_binary_path: {ssh_path}
+              ffx_binary_path: {ffx_path}
+              ssh_priv_key: {ssh_key_path}
+              hard_reboot_on_fail: true
+          TestParams:
+            sl4f_sanity_test_params:
+              can_overwrite: true
+              from_original: true
+              merged_with: true
+        MoblyParams:
+          LogPath: {out_path}
+        "#};
+
+        assert_eq!(got, want);
+    }
+
+    #[test]
+    fn infra_with_auxiliary_devices() {
+        const FUCHSIA_PDU_IP: &'static str = "192.168.42.14";
+        const FUCHSIA_PDU_PORT: u8 = 1;
+        const AP_IP: &'static str = "192.168.42.11";
+        const AP_AND_IPERF_PDU_IP: &'static str = "192.168.42.13";
+        const AP_PDU_PORT: u8 = 1;
+        const ATTENUATOR_IP: &'static str = "192.168.42.15";
+        const IPERF_IP: &'static str = "192.168.42.12";
+        const IPERF_USER: &'static str = "alice";
+        const IPERF_PDU_PORT: u8 = 2;
+
+        let ssh = NamedTempFile::new().unwrap();
+        let ssh_key = NamedTempFile::new().unwrap();
+        let ffx = NamedTempFile::new().unwrap();
+        let out_dir = TempDir::new().unwrap();
+
+        let testbed_config = NamedTempFile::new().unwrap();
+        serde_json::to_writer_pretty(
+            testbed_config.as_file(),
+            &json!([{
+                "type": "FuchsiaDevice",
+                "nodename": FUCHSIA_NAME,
+                "ipv4": "",
+                "ipv6": FUCHSIA_ADDR,
+                "ssh_key": ssh_key.path(),
+                "pdu": {
+                    "ip": FUCHSIA_PDU_IP,
+                    "port": FUCHSIA_PDU_PORT,
+                },
+            }, {
+                "type": "AccessPoint",
+                "ip": AP_IP,
+                "ssh_key": ssh_key.path(),
+                "attenuator": {
+                    "ip": ATTENUATOR_IP,
+                },
+                "pdu": {
+                    "ip": AP_AND_IPERF_PDU_IP,
+                    "port": AP_PDU_PORT,
+                    "device": "fancy-pdu",
+                },
+            }, {
+                "type": "IPerfServer",
+                "ip": IPERF_IP,
+                "ssh_key": ssh_key.path(),
+                "user": IPERF_USER,
+                "test_interface": "eth0",
+                "pdu": {
+                    "ip": AP_AND_IPERF_PDU_IP,
+                    "port": IPERF_PDU_PORT,
+                    "device": "fancy-pdu",
+                },
+            }]),
+        )
+        .unwrap();
+
+        let runner = MockRunner::new(out_dir.path().to_path_buf());
+        let env = MockEnvironment {
+            config: Some(testbed_config.path().to_path_buf()),
+            out_dir: Some(out_dir.path().to_path_buf()),
+        };
+        let driver =
+            InfraDriver::new(env, ssh.path().to_path_buf(), ffx.path().to_path_buf()).unwrap();
+        run(runner, driver, None).unwrap();
+
+        let got = std::fs::read_to_string(out_dir.path().join("config.yaml")).unwrap();
+
+        let ssh_path = ssh.path().display().to_string();
+        let ssh_key_path = ssh_key.path().display().to_string();
+        let ffx_path = ffx.path().display().to_string();
+        let out_path = out_dir.path().display();
+        let want = formatdoc! {r#"
+        TestBeds:
+        - Name: {TESTBED_NAME}
+          Controllers:
+            FuchsiaDevice:
+            - mdns_name: {FUCHSIA_NAME}
+              ip: {FUCHSIA_ADDR}
+              take_bug_report_on_fail: true
+              ssh_binary_path: {ssh_path}
+              ffx_binary_path: {ffx_path}
+              ssh_priv_key: {ssh_key_path}
+              PduDevice:
+                device: synaccess.np02b
+                host: {FUCHSIA_PDU_IP}
+                port: {FUCHSIA_PDU_PORT}
+              hard_reboot_on_fail: true
+            AccessPoint:
+            - wan_interface: eth0
+              ssh_config:
+                ssh_binary_path: {ssh_path}
+                host: {AP_IP}
+                user: root
+                identity_file: {ssh_key_path}
+              PduDevice:
+                device: fancy-pdu
+                host: {AP_AND_IPERF_PDU_IP}
+                port: {AP_PDU_PORT}
+              Attenuator:
+              - Address: {ATTENUATOR_IP}
+                attenuator_ports_wifi_2g:
+                - 1
+                - 2
+                - 3
+                attenuator_ports_wifi_5g:
+                - 1
+                - 2
+                - 3
+            Attenuator:
+            - Model: minicircuits
+              InstrumentCount: 4
+              Address: {ATTENUATOR_IP}
+              Protocol: http
+              Port: 80
+            PduDevice:
+            - device: fancy-pdu
+              host: {AP_AND_IPERF_PDU_IP}
+            - device: synaccess.np02b
+              host: {FUCHSIA_PDU_IP}
+            IPerfServer:
+            - ssh_config:
+                ssh_binary_path: {ssh_path}
+                host: {IPERF_IP}
+                user: {IPERF_USER}
+                identity_file: {ssh_key_path}
+              port: 5201
+              test_interface: eth0
+              use_killall: true
+        MoblyParams:
+          LogPath: {out_path}
+        "#};
+
+        assert_eq!(got, want);
+    }
+
+    #[test]
+    fn infra_duplicate_port_pdu() {
+        let pdu_ip: IpAddr = "192.168.42.13".parse().unwrap();
+        let pdu_port = 1;
+
+        let ssh = NamedTempFile::new().unwrap();
+        let ssh_key = NamedTempFile::new().unwrap();
+        let ffx = NamedTempFile::new().unwrap();
+        let out_dir = TempDir::new().unwrap();
+
+        let testbed_config = NamedTempFile::new().unwrap();
+        serde_json::to_writer_pretty(
+            testbed_config.as_file(),
+            &json!([{
+                "type": "FuchsiaDevice",
+                "nodename": "foo",
+                "ipv4": "",
+                "ipv6": "fe80::1%2",
+                "ssh_key": ssh_key.path(),
+                "pdu": {
+                    "ip": pdu_ip,
+                    "port": pdu_port,
+                },
+            }, {
+                "type": "AccessPoint",
+                "ip": "192.168.42.11",
+                "ssh_key": ssh_key.path(),
+                "pdu": {
+                    "ip": pdu_ip,
+                    "port": pdu_port,
+                },
+            }]),
+        )
+        .unwrap();
+
+        let env = MockEnvironment {
+            config: Some(testbed_config.path().to_path_buf()),
+            out_dir: Some(out_dir.path().to_path_buf()),
+        };
+        let got = InfraDriver::new(env, ssh.path().to_path_buf(), ffx.path().to_path_buf());
+        assert_matches!(got,
+            Err(InfraDriverError::Config(ConfigError::DuplicatePort { ip, port }))
+                if ip == pdu_ip && port == pdu_port
+        );
+    }
+
+    #[test]
+    fn infra_duplicate_ip_pdu() {
+        let duplicate_ip: IpAddr = "192.168.42.13".parse().unwrap();
+
+        let ssh = NamedTempFile::new().unwrap();
+        let ssh_key = NamedTempFile::new().unwrap();
+        let ffx = NamedTempFile::new().unwrap();
+        let out_dir = TempDir::new().unwrap();
+
+        let testbed_config = NamedTempFile::new().unwrap();
+        serde_json::to_writer_pretty(
+            testbed_config.as_file(),
+            &json!([{
+                "type": "FuchsiaDevice",
+                "nodename": "foo",
+                "ipv4": "",
+                "ipv6": "fe80::1%2",
+                "ssh_key": ssh_key.path(),
+                "pdu": {
+                    "ip": duplicate_ip,
+                    "port": 1,
+                    "device": "A",
+                },
+            }, {
+                "type": "AccessPoint",
+                "ip": "192.168.42.11",
+                "ssh_key": ssh_key.path(),
+                "pdu": {
+                    "ip": duplicate_ip,
+                    "port": 2,
+                    "device": "B",
+                },
+            }]),
+        )
+        .unwrap();
+
+        let env = MockEnvironment {
+            config: Some(testbed_config.path().to_path_buf()),
+            out_dir: Some(out_dir.path().to_path_buf()),
+        };
+        assert_matches!(
+            InfraDriver::new(env, ssh.path().to_path_buf(), ffx.path().to_path_buf()),
+            Err(InfraDriverError::Config(ConfigError::DuplicateIp { ip }))
+                if ip == duplicate_ip
+        );
+    }
+
+    #[test]
+    fn infra_duplicate_ip_devices() {
+        let duplicate_ip: IpAddr = "192.168.42.11".parse().unwrap();
+
+        let ssh = NamedTempFile::new().unwrap();
+        let ssh_key = NamedTempFile::new().unwrap();
+        let ffx = NamedTempFile::new().unwrap();
+        let out_dir = TempDir::new().unwrap();
+
+        let testbed_config = NamedTempFile::new().unwrap();
+        serde_json::to_writer_pretty(
+            testbed_config.as_file(),
+            &json!([{
+                "type": "FuchsiaDevice",
+                "nodename": "foo",
+                "ipv4": duplicate_ip,
+                "ipv6": "",
+                "ssh_key": ssh_key.path(),
+            }, {
+                "type": "AccessPoint",
+                "ip": duplicate_ip,
+                "ssh_key": ssh_key.path(),
+            }]),
+        )
+        .unwrap();
+
+        let env = MockEnvironment {
+            config: Some(testbed_config.path().to_path_buf()),
+            out_dir: Some(out_dir.path().to_path_buf()),
+        };
+        let got = InfraDriver::new(env, ssh.path().to_path_buf(), ffx.path().to_path_buf());
+        assert_matches!(got,
+            Err(InfraDriverError::Config(ConfigError::DuplicateIp { ip }))
+                if ip == duplicate_ip
+        );
+    }
+
+    #[test]
+    fn remove_symlinks_works() {
+        const SYMLINK_FILE: &'static str = "latest";
+
+        let out_dir = TempDir::new().unwrap();
+        let test_file = NamedTempFile::new_in(&out_dir).unwrap();
+        let symlink_path = out_dir.path().join(SYMLINK_FILE);
+
+        #[cfg(unix)]
+        std::os::unix::fs::symlink(&test_file, &symlink_path).unwrap();
+        #[cfg(windows)]
+        std::os::windows::fs::symlink_file(&test_file, &symlink_path).unwrap();
+
+        assert_matches!(remove_symlinks(out_dir.path()), Ok(()));
+        assert_matches!(fs::symlink_metadata(symlink_path), Err(e) if e.kind() == std::io::ErrorKind::NotFound);
+        assert_matches!(fs::symlink_metadata(test_file), Ok(meta) if meta.is_file());
+    }
+}
diff --git a/runner/src/driver/local.rs b/runner/src/driver/local.rs
new file mode 100644
index 0000000..983a6a7
--- /dev/null
+++ b/runner/src/driver/local.rs
@@ -0,0 +1,287 @@
+// Copyright 2023 The Fuchsia Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+use crate::config;
+use crate::driver::Driver;
+use crate::finder::{Answer, Finder};
+use crate::net::IpAddr;
+
+use std::path::{Path, PathBuf};
+
+use anyhow::{ensure, Context, Result};
+use home::home_dir;
+
+const TESTBED_NAME: &'static str = "antlion-runner";
+
+/// Driver for running antlion locally on an emulated or hardware testbed with
+/// optional mDNS discovery when a DHCP server is not available. This is useful
+/// for testing changes locally in a development environment.
+pub(crate) struct LocalDriver {
+    target: LocalTarget,
+    output_dir: PathBuf,
+    ssh_binary: PathBuf,
+    ffx_binary: PathBuf,
+}
+
+impl LocalDriver {
+    pub fn new<F>(
+        device: Option<String>,
+        ssh_binary: PathBuf,
+        ssh_key: Option<PathBuf>,
+        ffx_binary: PathBuf,
+        out_dir: Option<PathBuf>,
+    ) -> Result<Self>
+    where
+        F: Finder,
+    {
+        let output_dir = match out_dir {
+            Some(p) => Ok(p),
+            None => std::env::current_dir().context("Failed to get current working directory"),
+        }?;
+        Ok(Self {
+            target: LocalTarget::new::<F>(device, ssh_key)?,
+            output_dir,
+            ssh_binary,
+            ffx_binary,
+        })
+    }
+}
+
+impl Driver for LocalDriver {
+    fn output_path(&self) -> &Path {
+        self.output_dir.as_path()
+    }
+    fn config(&self) -> config::Config {
+        config::Config {
+            testbeds: vec![config::Testbed {
+                name: TESTBED_NAME.to_string(),
+                controllers: config::Controllers {
+                    fuchsia_devices: vec![config::Fuchsia {
+                        mdns_name: self.target.name.clone(),
+                        ip: self.target.ip.clone(),
+                        take_bug_report_on_fail: true,
+                        ssh_binary_path: self.ssh_binary.clone(),
+                        // TODO(http://b/244747218): Remove when ssh_config is refactored away
+                        ssh_config: None,
+                        ffx_binary_path: self.ffx_binary.clone(),
+                        ssh_priv_key: self.target.ssh_key.clone(),
+                        pdu_device: None,
+                        hard_reboot_on_fail: true,
+                    }],
+                    ..Default::default()
+                },
+                test_params: None,
+            }],
+            mobly_params: config::MoblyParams { log_path: self.output_dir.clone() },
+        }
+    }
+    fn teardown(&self) -> Result<()> {
+        println!(
+            "\nView full antlion logs at {}",
+            self.output_dir.join(TESTBED_NAME).join("latest").display()
+        );
+        Ok(())
+    }
+}
+
+/// LocalTarget performs best-effort discovery of target information from
+/// standard Fuchsia environment variables.
+struct LocalTarget {
+    name: String,
+    ip: IpAddr,
+    ssh_key: PathBuf,
+}
+
+impl LocalTarget {
+    fn new<F>(device: Option<String>, ssh_key: Option<PathBuf>) -> Result<Self>
+    where
+        F: Finder,
+    {
+        let device_name = device.or_else(|| match std::env::var("FUCHSIA_DIR") {
+            Ok(dir) => match std::fs::read_to_string(format!("{dir}/out/default.device")) {
+                Ok(name) => Some(name.trim().to_string()),
+                Err(_) => {
+                    println!("A default device using \"fx set-device\" has not been set");
+                    println!("Using the first Fuchsia device discovered via mDNS");
+                    None
+                }
+            },
+            Err(_) => {
+                println!("Neither --device nor FUCHSIA_DIR has been set");
+                println!("Using the first Fuchsia device discovered via mDNS");
+                None
+            }
+        });
+
+        let Answer { name, ip } = F::find_device(device_name)?;
+
+        // TODO: Move this validation out to Args
+        let ssh_key = ssh_key
+            .or_else(|| home_dir().map(|p| p.join(".ssh/fuchsia_ed25519").to_path_buf()))
+            .context("Failed to detect the private Fuchsia SSH key")?;
+
+        ensure!(
+            ssh_key.try_exists().with_context(|| format!(
+                "Failed to check existence of SSH key \"{}\"",
+                ssh_key.display()
+            ))?,
+            "Cannot find SSH key \"{}\"",
+            ssh_key.display()
+        );
+
+        Ok(LocalTarget { name, ip, ssh_key })
+    }
+}
+
+#[cfg(test)]
+mod test {
+    use super::*;
+
+    use crate::finder::{Answer, Finder};
+    use crate::run;
+    use crate::runner::{ExitStatus, Runner};
+
+    use indoc::formatdoc;
+    use pretty_assertions::assert_eq;
+    use tempfile::{NamedTempFile, TempDir};
+
+    const FUCHSIA_NAME: &'static str = "fuchsia-1234-5678-9abc";
+    const FUCHSIA_ADDR: &'static str = "fe80::1%2";
+    const FUCHSIA_IP: &'static str = "fe80::1";
+    const SCOPE_ID: u32 = 2;
+
+    struct MockFinder;
+    impl Finder for MockFinder {
+        fn find_device(_: Option<String>) -> Result<Answer> {
+            Ok(Answer {
+                name: FUCHSIA_NAME.to_string(),
+                ip: IpAddr::V6(FUCHSIA_IP.parse().unwrap(), Some(SCOPE_ID)),
+            })
+        }
+    }
+
+    #[derive(Default)]
+    struct MockRunner {
+        config: std::cell::Cell<PathBuf>,
+    }
+    impl Runner for MockRunner {
+        fn run(&self, config: PathBuf) -> Result<ExitStatus> {
+            self.config.set(config);
+            Ok(ExitStatus::Ok)
+        }
+    }
+
+    #[test]
+    fn local_invalid_ssh_key() {
+        let ssh = NamedTempFile::new().unwrap();
+        let ffx = NamedTempFile::new().unwrap();
+        let out_dir = TempDir::new().unwrap();
+
+        assert!(LocalDriver::new::<MockFinder>(
+            None,
+            ssh.path().to_path_buf(),
+            Some(PathBuf::new()),
+            ffx.path().to_path_buf(),
+            Some(out_dir.path().to_path_buf()),
+        )
+        .is_err());
+    }
+
+    #[test]
+    fn local() {
+        let ssh = NamedTempFile::new().unwrap();
+        let ssh_key = NamedTempFile::new().unwrap();
+        let ffx = NamedTempFile::new().unwrap();
+        let out_dir = TempDir::new().unwrap();
+
+        let runner = MockRunner::default();
+        let driver = LocalDriver::new::<MockFinder>(
+            None,
+            ssh.path().to_path_buf(),
+            Some(ssh_key.path().to_path_buf()),
+            ffx.path().to_path_buf(),
+            Some(out_dir.path().to_path_buf()),
+        )
+        .unwrap();
+
+        run(runner, driver, None).unwrap();
+
+        let got = std::fs::read_to_string(out_dir.path().join("config.yaml")).unwrap();
+
+        let ssh_path = ssh.path().display();
+        let ssh_key_path = ssh_key.path().display();
+        let ffx_path = ffx.path().display();
+        let out_path = out_dir.path().display();
+        let want = formatdoc! {r#"
+        TestBeds:
+        - Name: {TESTBED_NAME}
+          Controllers:
+            FuchsiaDevice:
+            - mdns_name: {FUCHSIA_NAME}
+              ip: {FUCHSIA_ADDR}
+              take_bug_report_on_fail: true
+              ssh_binary_path: {ssh_path}
+              ffx_binary_path: {ffx_path}
+              ssh_priv_key: {ssh_key_path}
+              hard_reboot_on_fail: true
+        MoblyParams:
+          LogPath: {out_path}
+        "#};
+
+        assert_eq!(got, want);
+    }
+
+    #[test]
+    fn local_with_test_params() {
+        let ssh = NamedTempFile::new().unwrap();
+        let ssh_key = NamedTempFile::new().unwrap();
+        let ffx = NamedTempFile::new().unwrap();
+        let out_dir = TempDir::new().unwrap();
+
+        let runner = MockRunner::default();
+        let driver = LocalDriver::new::<MockFinder>(
+            None,
+            ssh.path().to_path_buf(),
+            Some(ssh_key.path().to_path_buf()),
+            ffx.path().to_path_buf(),
+            Some(out_dir.path().to_path_buf()),
+        )
+        .unwrap();
+
+        let params_yaml = "
+        sl4f_sanity_test_params:
+            foo: bar
+        ";
+        let params = serde_yaml::from_str(params_yaml).unwrap();
+
+        run(runner, driver, Some(params)).unwrap();
+
+        let got = std::fs::read_to_string(out_dir.path().join("config.yaml")).unwrap();
+
+        let ssh_path = ssh.path().display().to_string();
+        let ssh_key_path = ssh_key.path().display().to_string();
+        let ffx_path = ffx.path().display().to_string();
+        let out_path = out_dir.path().display();
+        let want = formatdoc! {r#"
+        TestBeds:
+        - Name: {TESTBED_NAME}
+          Controllers:
+            FuchsiaDevice:
+            - mdns_name: {FUCHSIA_NAME}
+              ip: {FUCHSIA_ADDR}
+              take_bug_report_on_fail: true
+              ssh_binary_path: {ssh_path}
+              ffx_binary_path: {ffx_path}
+              ssh_priv_key: {ssh_key_path}
+              hard_reboot_on_fail: true
+          TestParams:
+            sl4f_sanity_test_params:
+              foo: bar
+        MoblyParams:
+          LogPath: {out_path}
+        "#};
+
+        assert_eq!(got, want);
+    }
+}
diff --git a/runner/src/driver/mod.rs b/runner/src/driver/mod.rs
new file mode 100644
index 0000000..35de41f
--- /dev/null
+++ b/runner/src/driver/mod.rs
@@ -0,0 +1,24 @@
+// Copyright 2023 The Fuchsia Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+pub(crate) mod infra;
+pub(crate) mod local;
+
+use crate::config::Config;
+
+use std::path::Path;
+
+use anyhow::Result;
+
+/// Driver provides insight into the information surrounding running an antlion
+/// test.
+pub(crate) trait Driver {
+    /// Path to output directory for test artifacts.
+    fn output_path(&self) -> &Path;
+    /// Antlion config for use during test.
+    fn config(&self) -> Config;
+    /// Additional logic to run after all tests run, regardless of tests passing
+    /// or failing.
+    fn teardown(&self) -> Result<()>;
+}
diff --git a/runner/src/env.rs b/runner/src/env.rs
new file mode 100644
index 0000000..ede8b74
--- /dev/null
+++ b/runner/src/env.rs
@@ -0,0 +1,25 @@
+// Copyright 2023 The Fuchsia Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+use std::ffi::OsStr;
+use std::env::VarError;
+
+/// Inspection of the process's environment.
+pub(crate) trait Environment {
+    /// Fetches the environment variable `key` from the current process.
+    ///
+    /// See [std::env::var] for details.
+    ///
+    /// [std::env::var]: https://doc.rust-lang.org/std/env/fn.var.html
+    fn var<K: AsRef<OsStr>>(&self, key: K) -> Result<String, VarError>;
+}
+
+/// Query the local process's environment.
+pub(crate) struct LocalEnvironment;
+
+impl Environment for LocalEnvironment {
+    fn var<K: AsRef<OsStr>>(&self, key: K) -> Result<String, VarError> {
+        std::env::var(key)
+    }
+}
diff --git a/runner/src/finder.rs b/runner/src/finder.rs
new file mode 100644
index 0000000..c381b36
--- /dev/null
+++ b/runner/src/finder.rs
@@ -0,0 +1,200 @@
+// Copyright 2023 The Fuchsia Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+use crate::net::IpAddr;
+
+use std::io;
+use std::net::{Ipv6Addr, SocketAddr, SocketAddrV6, UdpSocket};
+use std::str;
+use std::time::{Duration, Instant};
+
+use anyhow::{bail, Context, Result};
+use mdns::protocol as dns;
+use netext::{get_mcast_interfaces, IsLocalAddr, McastInterface};
+use packet::{InnerPacketBuilder, ParseBuffer};
+use socket2::{Domain, Protocol, Socket, Type};
+
+const FUCHSIA_DOMAIN: &str = "_fuchsia._udp.local";
+const MDNS_MCAST_V6: Ipv6Addr = Ipv6Addr::new(0xff02, 0, 0, 0, 0, 0, 0, 0x00fb);
+const MDNS_PORT: u16 = 5353;
+const MDNS_TIMEOUT: Duration = Duration::from_secs(10);
+
+lazy_static::lazy_static! {
+    static ref MDNS_QUERY: &'static [u8] = construct_query_buf(FUCHSIA_DOMAIN);
+}
+
+/// Find Fuchsia devices.
+pub(crate) trait Finder {
+    /// Find a Fuchsia device, preferring `device_name` if specified.
+    fn find_device(device_name: Option<String>) -> Result<Answer>;
+}
+
+/// Answer from a Finder.
+pub(crate) struct Answer {
+    /// Name of the Fuchsia device.
+    pub name: String,
+    /// IP address of the Fuchsia device.
+    pub ip: IpAddr,
+}
+
+pub(crate) struct MulticastDns {}
+
+impl Finder for MulticastDns {
+    /// Find a Fuchsia device using mDNS. If `device_name` is not specified, the
+    /// first device will be used.
+    fn find_device(device_name: Option<String>) -> Result<Answer> {
+        let interfaces =
+            get_mcast_interfaces().context("Failed to list multicast-enabled interfaces")?;
+        let interface_names =
+            interfaces.iter().map(|i| i.name.clone()).collect::<Vec<String>>().join(", ");
+        if let Some(ref d) = device_name {
+            println!("Performing mDNS discovery for {d} on interfaces: {interface_names}");
+        } else {
+            println!("Performing mDNS discovery on interfaces: {interface_names}");
+        }
+
+        let socket = create_socket(interfaces.iter()).context("Failed to create mDNS socket")?;
+
+        // TODO(http://b/264936590): Remove the race condition where the Fuchsia
+        // device can send its answer before this socket starts listening. Add an
+        // async runtime and concurrently listen for answers while sending queries.
+        send_queries(&socket, interfaces.iter()).context("Failed to send mDNS queries")?;
+        let answer = listen_for_answers(socket, device_name)?;
+
+        println!("Device {} found at {}", answer.name, answer.ip);
+        Ok(answer)
+    }
+}
+
+fn construct_query_buf(service: &str) -> &'static [u8] {
+    let question = dns::QuestionBuilder::new(
+        dns::DomainBuilder::from_str(service).unwrap(),
+        dns::Type::Ptr,
+        dns::Class::In,
+        true,
+    );
+
+    let mut message = dns::MessageBuilder::new(0, true);
+    message.add_question(question);
+
+    let mut buf = vec![0; message.bytes_len()];
+    message.serialize(buf.as_mut_slice());
+    Box::leak(buf.into_boxed_slice())
+}
+
+/// Create a socket for both sending and listening on all multicast-capable
+/// interfaces.
+fn create_socket<'a>(interfaces: impl Iterator<Item = &'a McastInterface>) -> Result<Socket> {
+    let socket = Socket::new(Domain::IPV6, Type::DGRAM, Some(Protocol::UDP))?;
+    let read_timeout = Duration::from_millis(100);
+    socket
+        .set_read_timeout(Some(read_timeout))
+        .with_context(|| format!("Failed to set SO_RCVTIMEO to {}ms", read_timeout.as_millis()))?;
+    socket.set_only_v6(true).context("Failed to set IPV6_V6ONLY")?;
+    socket.set_reuse_address(true).context("Failed to set SO_REUSEADDR")?;
+    socket.set_reuse_port(true).context("Failed to set SO_REUSEPORT")?;
+
+    for interface in interfaces {
+        // Listen on all multicast-enabled interfaces
+        match interface.id() {
+            Ok(id) => match socket.join_multicast_v6(&MDNS_MCAST_V6, id) {
+                Ok(()) => {}
+                Err(e) => eprintln!("Failed to join mDNS multicast group on interface {id}: {e}"),
+            },
+            Err(e) => eprintln!("Failed to listen on interface {}: {}", interface.name, e),
+        }
+    }
+
+    socket
+        .bind(&SocketAddrV6::new(Ipv6Addr::UNSPECIFIED, 0, 0, 0).into())
+        .with_context(|| format!("Failed to bind to unspecified IPv6"))?;
+
+    Ok(socket)
+}
+
+fn send_queries<'a>(
+    socket: &Socket,
+    interfaces: impl Iterator<Item = &'a McastInterface>,
+) -> Result<()> {
+    let to_addr = SocketAddrV6::new(MDNS_MCAST_V6, MDNS_PORT, 0, 0).into();
+
+    for interface in interfaces {
+        let id = interface
+            .id()
+            .with_context(|| format!("Failed to get interface ID for {}", interface.name))?;
+        socket
+            .set_multicast_if_v6(id)
+            .with_context(|| format!("Failed to set multicast interface for {}", interface.name))?;
+        for addr in &interface.addrs {
+            if let SocketAddr::V6(addr_v6) = addr {
+                if !addr.ip().is_local_addr() || addr.ip().is_loopback() {
+                    continue;
+                }
+                if let Err(e) = socket.send_to(&MDNS_QUERY, &to_addr) {
+                    eprintln!(
+                        "Failed to send mDNS query out {} via {}: {e}",
+                        interface.name,
+                        addr_v6.ip()
+                    );
+                    continue;
+                }
+            }
+        }
+    }
+    Ok(())
+}
+
+fn listen_for_answers(socket: Socket, device_name: Option<String>) -> Result<Answer> {
+    let s: UdpSocket = socket.into();
+    let mut buf = [0; 1500];
+
+    let end = Instant::now() + MDNS_TIMEOUT;
+    while Instant::now() < end {
+        match s.recv_from(&mut buf) {
+            Ok((packet_bytes, src_sock_addr)) => {
+                if !src_sock_addr.ip().is_local_addr() {
+                    continue;
+                }
+
+                let mut packet_buf = &mut buf[..packet_bytes];
+                match packet_buf.parse::<dns::Message<_>>() {
+                    Ok(message) => {
+                        if !message.answers.iter().any(|a| a.domain == FUCHSIA_DOMAIN) {
+                            continue;
+                        }
+                        for answer in message.additional {
+                            if let Some(std::net::IpAddr::V6(addr)) = answer.rdata.ip_addr() {
+                                if let SocketAddr::V6(src_v6) = src_sock_addr {
+                                    let name = answer
+                                        .domain
+                                        .to_string()
+                                        .trim_end_matches(".local")
+                                        .to_string();
+                                    let scope_id = src_v6.scope_id();
+
+                                    if let Some(ref device) = device_name {
+                                        if &name != device {
+                                            println!("Found irrelevant device {name} at {addr}%{scope_id}");
+                                            continue;
+                                        }
+                                    }
+
+                                    return Ok(Answer {
+                                        name,
+                                        ip: IpAddr::V6(addr, Some(scope_id)),
+                                    });
+                                }
+                            }
+                        }
+                    }
+                    Err(err) => eprintln!("Failed to parse mDNS packet: {err:?}"),
+                }
+            }
+            Err(err) if err.kind() == io::ErrorKind::WouldBlock => {}
+            Err(err) => return Err(err.into()),
+        }
+    }
+
+    bail!("device {device_name:?} not found")
+}
diff --git a/runner/src/main.rs b/runner/src/main.rs
new file mode 100644
index 0000000..4252694
--- /dev/null
+++ b/runner/src/main.rs
@@ -0,0 +1,149 @@
+// Copyright 2023 The Fuchsia Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+mod config;
+mod driver;
+mod env;
+mod finder;
+mod net;
+mod runner;
+mod yaml;
+
+use crate::driver::infra::{InfraDriver, InfraDriverError};
+use crate::runner::ExitStatus;
+
+use std::fs::File;
+use std::path::PathBuf;
+use std::{fs, process::ExitCode};
+
+use anyhow::{Context, Result};
+use argh::FromArgs;
+use serde_yaml;
+use serde_yaml::Value;
+
+#[derive(FromArgs)]
+/// antlion runner with config generation
+struct Args {
+    /// name of the Fuchsia device to use for testing; defaults to using mDNS
+    /// discovery
+    #[argh(option)]
+    device: Option<String>,
+
+    /// path to the SSH binary used to communicate with all devices
+    #[argh(option, from_str_fn(parse_file))]
+    ssh_binary: PathBuf,
+
+    /// path to the SSH private key used to communicate with Fuchsia; defaults
+    /// to ~/.ssh/fuchsia_ed25519
+    #[argh(option, from_str_fn(parse_file))]
+    ssh_key: Option<PathBuf>,
+
+    /// path to the FFX binary used to communicate with Fuchsia
+    #[argh(option, from_str_fn(parse_file))]
+    ffx_binary: PathBuf,
+
+    /// path to the python interpreter binary (e.g. /bin/python3.9)
+    #[argh(option)]
+    python_bin: String,
+
+    /// path to the antlion zipapp, ending in .pyz
+    #[argh(option, from_str_fn(parse_file))]
+    antlion_pyz: PathBuf,
+
+    /// path to a directory for outputting artifacts; defaults to the current
+    /// working directory or FUCHSIA_TEST_OUTDIR
+    #[argh(option, from_str_fn(parse_directory))]
+    out_dir: Option<PathBuf>,
+
+    /// path to additional YAML config for this test; placed in the
+    /// "test_params" key in the antlion config
+    #[argh(option, from_str_fn(parse_file))]
+    test_params: Option<PathBuf>,
+}
+
+fn parse_file(s: &str) -> Result<PathBuf, String> {
+    let path = PathBuf::from(s);
+    let _ = File::open(&path).map_err(|e| format!("Failed to open \"{s}\": {e}"))?;
+    Ok(path)
+}
+
+fn parse_directory(s: &str) -> Result<PathBuf, String> {
+    let path = PathBuf::from(s);
+    let meta =
+        std::fs::metadata(&path).map_err(|e| format!("Failed to read metadata of \"{s}\": {e}"))?;
+    if meta.is_file() {
+        return Err(format!("Expected a directory but found a file at \"{s}\""));
+    }
+    Ok(path)
+}
+
+fn run<R, D>(runner: R, driver: D, test_params: Option<Value>) -> Result<ExitCode>
+where
+    R: runner::Runner,
+    D: driver::Driver,
+{
+    let mut config = driver.config();
+    if let Some(params) = test_params {
+        config.merge_test_params(params);
+    }
+
+    let yaml =
+        serde_yaml::to_string(&config).context("Failed to convert antlion config to YAML")?;
+
+    let output_path = driver.output_path().to_path_buf();
+    let config_path = output_path.join("config.yaml");
+    println!("Writing {}", config_path.display());
+    println!("\n{yaml}\n");
+    fs::write(&config_path, yaml).context("Failed to write config to file")?;
+
+    let exit_code = runner.run(config_path).context("Failed to run antlion")?;
+    match exit_code {
+        ExitStatus::Ok => println!("Antlion successfully exited"),
+        ExitStatus::Err(code) => eprintln!("Antlion failed with status code {}", code),
+        ExitStatus::Interrupt(Some(code)) => eprintln!("Antlion interrupted by signal {}", code),
+        ExitStatus::Interrupt(None) => eprintln!("Antlion interrupted by signal"),
+    };
+    driver.teardown().context("Failed to teardown environment")?;
+    Ok(exit_code.into())
+}
+
+fn main() -> Result<ExitCode> {
+    let args: Args = argh::from_env();
+    let env = env::LocalEnvironment;
+    let runner =
+        runner::ProcessRunner { python_bin: args.python_bin, antlion_pyz: args.antlion_pyz };
+
+    let test_params = match args.test_params {
+        Some(path) => {
+            let text = fs::read_to_string(&path)
+                .with_context(|| format!("Failed to read file \"{}\"", path.display()))?;
+            let yaml = serde_yaml::from_str(&text)
+                .with_context(|| format!("Failed to parse \"{text}\" as YAML"))?;
+            Some(yaml)
+        }
+        None => None,
+    };
+
+    match InfraDriver::new(env, args.ssh_binary.clone(), args.ffx_binary.clone()) {
+        Ok(env) => return run(runner, env, test_params),
+        Err(InfraDriverError::NotDetected(_)) => {}
+        Err(InfraDriverError::Config(e)) => {
+            return Err(anyhow::Error::from(e).context("Config validation"))
+        }
+        Err(InfraDriverError::Other(e)) => {
+            return Err(anyhow::Error::from(e).context("Unexpected infra driver error"))
+        }
+    };
+
+    let env = driver::local::LocalDriver::new::<finder::MulticastDns>(
+        args.device.clone(),
+        args.ssh_binary.clone(),
+        args.ssh_key.clone(),
+        args.ffx_binary.clone(),
+        args.out_dir.clone(),
+    )
+    .context("Failed to detect local environment")?;
+
+    run(runner, env, test_params)
+}
diff --git a/runner/src/net.rs b/runner/src/net.rs
new file mode 100644
index 0000000..70db2eb
--- /dev/null
+++ b/runner/src/net.rs
@@ -0,0 +1,231 @@
+// Copyright 2023 The Fuchsia Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+use std::fmt::{Debug, Display};
+use std::marker::PhantomData;
+use std::net::{Ipv4Addr, Ipv6Addr};
+
+use netext::IsLocalAddr;
+use nix::net::if_::if_nametoindex;
+use serde::{Deserialize, Serialize};
+use thiserror::Error;
+
+/// IP address with support for IPv6 scope identifiers as defined in RFC 4007.
+#[derive(Copy, Clone, Eq, PartialEq, Hash, PartialOrd, Ord)]
+pub enum IpAddr {
+    /// An IPv4 address.
+    V4(Ipv4Addr),
+    /// An IPv6 address with optional scope identifier.
+    V6(Ipv6Addr, Option<u32>),
+}
+
+impl From<IpAddr> for std::net::IpAddr {
+    fn from(value: IpAddr) -> std::net::IpAddr {
+        match value {
+            IpAddr::V4(ip) => std::net::IpAddr::from(ip),
+            IpAddr::V6(ip, _) => std::net::IpAddr::from(ip),
+        }
+    }
+}
+
+impl From<Ipv6Addr> for IpAddr {
+    fn from(value: Ipv6Addr) -> Self {
+        IpAddr::V6(value, None)
+    }
+}
+
+impl From<Ipv4Addr> for IpAddr {
+    fn from(value: Ipv4Addr) -> Self {
+        IpAddr::V4(value)
+    }
+}
+
+impl From<std::net::IpAddr> for IpAddr {
+    fn from(value: std::net::IpAddr) -> Self {
+        match value {
+            std::net::IpAddr::V4(ip) => IpAddr::from(ip),
+            std::net::IpAddr::V6(ip) => IpAddr::from(ip),
+        }
+    }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Error)]
+/// An error which can be returned when parsing an IP address with optional IPv6
+/// scope ID. See [`std::net::AddrParseError`].
+pub enum AddrParseError {
+    #[error(transparent)]
+    IpInvalid(#[from] std::net::AddrParseError),
+    #[error("no interface found with name \"{0}\"")]
+    InterfaceNotFound(String),
+    #[error("only IPv6 link-local may include a scope ID")]
+    /// Scope IDs are only supported for IPv6 link-local addresses as per RFC
+    /// 6874 Section 4.
+    ScopeNotSupported,
+}
+
+impl std::str::FromStr for IpAddr {
+    type Err = AddrParseError;
+
+    fn from_str(s: &str) -> Result<Self, Self::Err> {
+        let mut parts = s.splitn(2, '%');
+        let addr = parts.next().unwrap(); // first element is guaranteed
+        let ip = std::net::IpAddr::from_str(addr)?;
+        let scope = parts.next();
+        match (ip, scope) {
+            (std::net::IpAddr::V4(ip), None) => Ok(IpAddr::from(ip)),
+            (std::net::IpAddr::V4(_), Some(_)) => Err(AddrParseError::ScopeNotSupported),
+            (std::net::IpAddr::V6(ip), None) => Ok(IpAddr::V6(ip, None)),
+            (std::net::IpAddr::V6(ip), Some(scope)) => {
+                if !ip.is_link_local_addr() {
+                    return Err(AddrParseError::ScopeNotSupported);
+                }
+                if let Ok(index) = scope.parse::<u32>() {
+                    return Ok(IpAddr::V6(ip, Some(index)));
+                }
+                match if_nametoindex(scope) {
+                    Ok(index) => Ok(IpAddr::V6(ip, Some(index))),
+                    Err(_) => Err(AddrParseError::InterfaceNotFound(scope.to_string())),
+                }
+            }
+        }
+    }
+}
+
+impl Display for IpAddr {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        match self {
+            IpAddr::V4(ip) => Display::fmt(ip, f),
+            IpAddr::V6(ip, None) => Display::fmt(ip, f),
+            IpAddr::V6(ip, Some(scope)) => {
+                Display::fmt(ip, f)?;
+                write!(f, "%{}", scope)
+            }
+        }
+    }
+}
+
+impl Debug for IpAddr {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        Display::fmt(self, f)
+    }
+}
+
+impl Serialize for IpAddr {
+    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+    where
+        S: serde::Serializer,
+    {
+        serializer.serialize_str(self.to_string().as_str())
+    }
+}
+
+impl<'de> Deserialize<'de> for IpAddr {
+    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+    where
+        D: serde::Deserializer<'de>,
+    {
+        deserializer.deserialize_str(FromStrVisitor::new())
+    }
+}
+
+struct FromStrVisitor<T> {
+    ty: PhantomData<T>,
+}
+
+impl<T> FromStrVisitor<T> {
+    fn new() -> Self {
+        FromStrVisitor { ty: PhantomData }
+    }
+}
+
+impl<'de, T> serde::de::Visitor<'de> for FromStrVisitor<T>
+where
+    T: std::str::FromStr,
+    T::Err: std::fmt::Display,
+{
+    type Value = T;
+
+    fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        formatter.write_str("IP address")
+    }
+
+    fn visit_str<E>(self, s: &str) -> Result<Self::Value, E>
+    where
+        E: serde::de::Error,
+    {
+        s.parse().map_err(serde::de::Error::custom)
+    }
+}
+
+#[cfg(test)]
+mod test {
+    use super::{AddrParseError, IpAddr};
+    use assert_matches::assert_matches;
+
+    #[test]
+    fn parse_ip_invalid() {
+        assert_matches!("".parse::<IpAddr>(), Err(AddrParseError::IpInvalid(_)));
+        assert_matches!("192.168.1.".parse::<IpAddr>(), Err(AddrParseError::IpInvalid(_)));
+        assert_matches!("fe80:".parse::<IpAddr>(), Err(AddrParseError::IpInvalid(_)));
+    }
+
+    #[test]
+    fn parse_ipv4() {
+        assert_matches!(
+            "192.168.1.1".parse::<IpAddr>(),
+            Ok(IpAddr::V4(ip))
+                if ip == "192.168.1.1".parse::<std::net::Ipv4Addr>().unwrap()
+        );
+    }
+
+    #[test]
+    fn parse_ipv4_with_scope() {
+        assert_matches!(
+            "192.168.1.1%1".parse::<IpAddr>(),
+            Err(AddrParseError::ScopeNotSupported)
+        );
+    }
+
+    #[test]
+    fn parse_ipv6() {
+        assert_matches!(
+            "fe80::1".parse::<IpAddr>(),
+            Ok(IpAddr::V6(ip, None))
+                if ip == "fe80::1".parse::<std::net::Ipv6Addr>().unwrap()
+        );
+    }
+
+    #[test]
+    fn parse_ipv6_global_with_scope() {
+        assert_matches!("2001::1%1".parse::<IpAddr>(), Err(AddrParseError::ScopeNotSupported));
+    }
+
+    #[test]
+    fn parse_ipv6_link_local_with_scope() {
+        assert_matches!(
+            "fe80::1%1".parse::<IpAddr>(),
+            Ok(IpAddr::V6(ip, Some(scope)))
+                if ip == "fe80::1".parse::<std::net::Ipv6Addr>().unwrap()
+                && scope == 1
+        );
+    }
+
+    #[test]
+    fn parse_ipv6_link_local_with_scope_interface_not_found() {
+        // An empty scope ID should trigger a failed lookup.
+        assert_matches!(
+            "fe80::1%".parse::<IpAddr>(),
+            Err(AddrParseError::InterfaceNotFound(name))
+                if name == ""
+        );
+
+        // The trailing '%' forces a failed lookup. At the time of writing, no
+        // OS supports this character as part of interface names.
+        assert_matches!(
+            "fe80::1%eth0%".parse::<IpAddr>(),
+            Err(AddrParseError::InterfaceNotFound(name))
+                if name == "eth0%"
+        );
+    }
+}
diff --git a/runner/src/runner.rs b/runner/src/runner.rs
new file mode 100644
index 0000000..c40e05d
--- /dev/null
+++ b/runner/src/runner.rs
@@ -0,0 +1,83 @@
+// Copyright 2023 The Fuchsia Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#[cfg(unix)]
+use std::os::unix::process::ExitStatusExt;
+use std::process::Command;
+use std::{path::PathBuf, process::ExitCode};
+
+use anyhow::{Context, Result};
+use itertools::Itertools;
+
+/// Runner for dispatching antlion.
+pub(crate) trait Runner {
+    /// Run antlion using the provided config and output directory.
+    fn run(&self, config: PathBuf) -> Result<ExitStatus>;
+}
+
+/// Executes antlion as a local process.
+pub(crate) struct ProcessRunner {
+    pub python_bin: String,
+    pub antlion_pyz: PathBuf,
+}
+
+impl Runner for ProcessRunner {
+    fn run(&self, config: PathBuf) -> Result<ExitStatus> {
+        let args = [
+            &self.antlion_pyz.clone().into_os_string().into_string().unwrap(),
+            "--config",
+            &config.into_os_string().into_string().unwrap(),
+        ];
+
+        println!(
+            "Launching antlion to run: \"{} {}\"\n",
+            &self.python_bin,
+            args.iter().format(" "),
+        );
+
+        let status = Command::new(&self.python_bin)
+            .args(args)
+            .status()
+            .context("Failed to execute antlion")?;
+
+        Ok(ExitStatus::from(status))
+    }
+}
+
+/// Describes the result of a child process after it has terminated.
+pub(crate) enum ExitStatus {
+    /// Process terminated without error.
+    Ok,
+    /// Process terminated with a non-zero status code.
+    Err(i32),
+    /// Process was interrupted by a signal.
+    Interrupt(Option<i32>),
+}
+
+impl From<std::process::ExitStatus> for ExitStatus {
+    fn from(status: std::process::ExitStatus) -> Self {
+        match status.code() {
+            Some(0) => ExitStatus::Ok,
+            Some(code) => ExitStatus::Err(code),
+            None if cfg!(unix) => ExitStatus::Interrupt(status.signal()),
+            None => ExitStatus::Interrupt(None),
+        }
+    }
+}
+
+impl From<ExitStatus> for ExitCode {
+    fn from(status: ExitStatus) -> ExitCode {
+        match status {
+            ExitStatus::Ok => ExitCode::SUCCESS,
+            ExitStatus::Err(code) => {
+                let code = match u8::try_from(code) {
+                    Ok(c) => c,
+                    Err(_) => 1,
+                };
+                ExitCode::from(code)
+            }
+            ExitStatus::Interrupt(_) => ExitCode::FAILURE,
+        }
+    }
+}
diff --git a/runner/src/yaml.rs b/runner/src/yaml.rs
new file mode 100644
index 0000000..ae972bf
--- /dev/null
+++ b/runner/src/yaml.rs
@@ -0,0 +1,99 @@
+// Copyright 2023 The Fuchsia Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+use serde_yaml::Value;
+
+/// Merge `b` into `a`, appending arrays and overwriting everything else.
+pub fn merge(a: &mut Value, b: Value) {
+    match (a, b) {
+        (Value::Mapping(ref mut a), Value::Mapping(b)) => {
+            for (k, v) in b {
+                if !a.contains_key(&k) {
+                    a.insert(k, v);
+                } else {
+                    merge(&mut a[&k], v);
+                }
+            }
+        }
+        (Value::Sequence(ref mut a), Value::Sequence(ref mut b)) => {
+            a.append(b);
+        }
+        (a, b) => *a = b,
+    }
+}
+
+#[cfg(test)]
+mod test {
+    use super::*;
+
+    #[test]
+    fn test_merge_mapping() {
+        let a = "
+            test_params:
+                name: a
+                who_called:
+                    was_a: true
+        ";
+        let mut a: Value = serde_yaml::from_str(a).unwrap();
+        let b = "
+            test_params:
+                name: b
+                who_called:
+                    was_b: true
+        ";
+        let b: Value = serde_yaml::from_str(b).unwrap();
+        merge(&mut a, b);
+        let want = "
+            test_params:
+                name: b
+                who_called:
+                    was_a: true
+                    was_b: true
+        ";
+        let want: Value = serde_yaml::from_str(want).unwrap();
+        assert_eq!(a, want);
+    }
+
+    #[test]
+    fn test_merge_append_arrays() {
+        let mut a: Value = serde_yaml::from_str(" - a").unwrap();
+        let b: Value = serde_yaml::from_str(" - b").unwrap();
+        merge(&mut a, b);
+        let want = "
+            - a
+            - b
+        ";
+        let want: Value = serde_yaml::from_str(want).unwrap();
+        assert_eq!(a, want);
+    }
+
+    #[test]
+    fn test_merge_append_arrays_allow_duplicates() {
+        let mut a: Value = serde_yaml::from_str(" - a").unwrap();
+        let b: Value = serde_yaml::from_str(" - a").unwrap();
+        merge(&mut a, b);
+        let want = "
+            - a
+            - a
+        ";
+        let want: Value = serde_yaml::from_str(want).unwrap();
+        assert_eq!(a, want);
+    }
+
+    #[test]
+    fn test_merge_overwrite_from_null() {
+        let mut a: Value = Value::Null;
+        let b: Value = serde_yaml::from_str("true").unwrap();
+        merge(&mut a, b.clone());
+        assert_eq!(a, b);
+    }
+
+    #[test]
+    fn test_merge_overwrite_with_null() {
+        let mut a: Value = serde_yaml::from_str("true").unwrap();
+        let b: Value = Value::Null;
+        merge(&mut a, b.clone());
+        assert_eq!(a, b);
+    }
+}
diff --git a/setup.py b/setup.py
index 30f198d..28f080f 100644
--- a/setup.py
+++ b/setup.py
@@ -20,33 +20,32 @@
     "mobly==1.12.0",
     "pyyaml>=5.1",
     "tenacity~=8.0",
+    # TODO(b/240443856): Remove these dependencies once antlion runs in
+    # Fuchsia's LUCI infrastructure. These are needed for flashing and using
+    # mDNS discovery, which are unnecessary in the future infrastructure.
+    "usbinfo",
+    "psutil",
+    "zeroconf",
 ]
 
-setup(name='antlion',
-      version='0.2.0',
-      description = "Host-driven, hardware-agnostic Fuchsia connectivity tests",
-      license='Apache-2.0',
-      packages=find_packages(
-          where='src',
-      ),
-      package_dir={"": "src"},
-      include_package_data=True,
-      tests_require=[],
-      install_requires=install_requires,
-      extras_require={
-          'html_graphing': ['bokeh'],
-          'dev': ['shiv', 'toml', 'yapf'],
-          'digital_loggers_pdu': ['dlipower'],
-          'flash': ['usbinfo'],
-          'mdns': ['psutil', 'zeroconf'],
-          'android': [
-              'Monsoon',
-              'numpy',
-              'paramiko[ed25519]',
-              'pylibftdi',
-              'pyserial',
-              'requests',
-              'scapy',
-              'scipy',
-          ],
-      })
+setup(
+    name="antlion",
+    version="0.2.0",
+    description="Host-driven, hardware-agnostic Fuchsia connectivity tests",
+    license="Apache-2.0",
+    packages=find_packages(
+        where="src",
+    ),
+    package_dir={"": "src"},
+    include_package_data=True,
+    tests_require=[],
+    install_requires=install_requires,
+    extras_require={
+        "html_graphing": ["bokeh"],
+        "digital_loggers_pdu": ["dlipower"],
+        "android": [
+            "numpy",
+            "scapy",
+        ],
+    },
+)
diff --git a/src/antlion/asserts.py b/src/antlion/asserts.py
deleted file mode 100644
index ce0a7b1..0000000
--- a/src/antlion/asserts.py
+++ /dev/null
@@ -1,63 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from mobly.asserts import *
-
-
-# Have an instance of unittest.TestCase so we could reuse some logic from
-# python's own unittest.
-# _ProxyTest is required because py2 does not allow instantiating
-# unittest.TestCase directly.
-class _ProxyTest(unittest.TestCase):
-    def runTest(self):
-        pass
-
-
-_pyunit_proxy = _ProxyTest()
-
-
-def assert_almost_equal(first,
-                        second,
-                        places=7,
-                        msg=None,
-                        delta=None,
-                        extras=None):
-    """
-    Assert FIRST to be within +/- DELTA to SECOND, otherwise fail the
-    test.
-    :param first: The first argument, LHS
-    :param second: The second argument, RHS
-    :param places: For floating points, how many decimal places to look into
-    :param msg: Message to display on failure
-    :param delta: The +/- first and second could be apart from each other
-    :param extras: Extra object passed to test failure handler
-    :return:
-    """
-    my_msg = None
-    try:
-        if delta:
-            _pyunit_proxy.assertAlmostEqual(
-                first, second, msg=msg, delta=delta)
-        else:
-            _pyunit_proxy.assertAlmostEqual(
-                first, second, places=places, msg=msg)
-    except Exception as e:
-        my_msg = str(e)
-        if msg:
-            my_msg = "%s %s" % (my_msg, msg)
-    # This is a hack to remove the stacktrace produced by the above exception.
-    if my_msg is not None:
-        fail(my_msg, extras=extras)
diff --git a/src/antlion/base_test.py b/src/antlion/base_test.py
index 5e39933..5033552 100755
--- a/src/antlion/base_test.py
+++ b/src/antlion/base_test.py
@@ -22,7 +22,6 @@
 import traceback
 from concurrent.futures import ThreadPoolExecutor
 
-from antlion import asserts
 from antlion import error
 from antlion import keys
 from antlion import logger
@@ -39,6 +38,7 @@
 from antlion.event.event import TestClassEndEvent
 from antlion.event.subscription_bundle import SubscriptionBundle
 
+from mobly import asserts
 from mobly.base_test import BaseTestClass as MoblyBaseTest
 from mobly.records import ExceptionRecord
 
@@ -50,26 +50,24 @@
 @subscribe_static(TestCaseBeginEvent)
 def _logcat_log_test_begin(event):
     """Ensures that logcat is running. Write a logcat line indicating test case
-     begin."""
+    begin."""
     test_instance = event.test_class
     try:
-        for ad in getattr(test_instance, 'android_devices', []):
+        for ad in getattr(test_instance, "android_devices", []):
             if not ad.is_adb_logcat_on:
                 ad.start_adb_logcat()
             # Write test start token to adb log if android device is attached.
             if not ad.skip_sl4a and ad.droid:
-                ad.droid.logV("%s BEGIN %s" %
-                              (TEST_CASE_TOKEN, event.test_case_name))
+                ad.droid.logV("%s BEGIN %s" % (TEST_CASE_TOKEN, event.test_case_name))
 
     except error.ActsError as e:
         test_instance.results.error.append(
-            ExceptionRecord(e, 'Logcat for test begin: %s' %
-                            event.test_case_name))
-        test_instance.log.error('BaseTest setup_test error: %s' % e.details)
+            ExceptionRecord(e, "Logcat for test begin: %s" % event.test_case_name)
+        )
+        test_instance.log.error("BaseTest setup_test error: %s" % e.details)
     except Exception as e:
-        test_instance.log.warning(
-            'Unable to send BEGIN log command to all devices.')
-        test_instance.log.warning('Error: %s' % e)
+        test_instance.log.warning("Unable to send BEGIN log command to all devices.")
+        test_instance.log.warning("Error: %s" % e)
 
 
 @subscribe_static(TestCaseEndEvent)
@@ -78,20 +76,18 @@
     test_instance = event.test_class
     try:
         # Write test end token to adb log if android device is attached.
-        for ad in getattr(test_instance, 'android_devices', []):
+        for ad in getattr(test_instance, "android_devices", []):
             if not ad.skip_sl4a and ad.droid:
-                ad.droid.logV("%s END %s" %
-                              (TEST_CASE_TOKEN, event.test_case_name))
+                ad.droid.logV("%s END %s" % (TEST_CASE_TOKEN, event.test_case_name))
 
     except error.ActsError as e:
         test_instance.results.error.append(
-            ExceptionRecord(e,
-                            'Logcat for test end: %s' % event.test_case_name))
-        test_instance.log.error('BaseTest teardown_test error: %s' % e.details)
+            ExceptionRecord(e, "Logcat for test end: %s" % event.test_case_name)
+        )
+        test_instance.log.error("BaseTest teardown_test error: %s" % e.details)
     except Exception as e:
-        test_instance.log.warning(
-            'Unable to send END log command to all devices.')
-        test_instance.log.warning('Error: %s' % e)
+        test_instance.log.warning("Unable to send END log command to all devices.")
+        test_instance.log.warning("Error: %s" % e)
 
 
 @subscribe_static(TestCaseBeginEvent)
@@ -100,15 +96,15 @@
     Fuchsia device"""
     test_instance = event.test_class
     try:
-        for fd in getattr(test_instance, 'fuchsia_devices', []):
-            if hasattr(fd, '_sl4f'):
+        for fd in getattr(test_instance, "fuchsia_devices", []):
+            if hasattr(fd, "_sl4f"):
                 fd.sl4f.logging_lib.logI(
-                    "%s BEGIN %s" % (TEST_CASE_TOKEN, event.test_case_name))
+                    "%s BEGIN %s" % (TEST_CASE_TOKEN, event.test_case_name)
+                )
 
     except Exception as e:
-        test_instance.log.warning(
-            'Unable to send BEGIN log command to all devices.')
-        test_instance.log.warning('Error: %s' % e)
+        test_instance.log.warning("Unable to send BEGIN log command to all devices.")
+        test_instance.log.warning("Error: %s" % e)
 
 
 @subscribe_static(TestCaseEndEvent)
@@ -117,15 +113,15 @@
     Fuchsia device"""
     test_instance = event.test_class
     try:
-        for fd in getattr(test_instance, 'fuchsia_devices', []):
-            if hasattr(fd, '_sl4f'):
+        for fd in getattr(test_instance, "fuchsia_devices", []):
+            if hasattr(fd, "_sl4f"):
                 fd.sl4f.logging_lib.logI(
-                    "%s END %s" % (TEST_CASE_TOKEN, event.test_case_name))
+                    "%s END %s" % (TEST_CASE_TOKEN, event.test_case_name)
+                )
 
     except Exception as e:
-        test_instance.log.warning(
-            'Unable to send END log command to all devices.')
-        test_instance.log.warning('Error: %s' % e)
+        test_instance.log.warning("Unable to send END log command to all devices.")
+        test_instance.log.warning("Error: %s" % e)
 
 
 event_bus.register_subscription(_logcat_log_test_begin.subscription)
@@ -193,7 +189,8 @@
 
         self.consecutive_failures = 0
         self.consecutive_failure_limit = self.user_params.get(
-            'consecutive_failure_limit', -1)
+            "consecutive_failure_limit", -1
+        )
         self.size_limit_reached = False
         self.retryable_exceptions = signals.TestFailure
 
@@ -211,8 +208,7 @@
         for ctrl_name in keys.Config.builtin_controller_names.value:
             if ctrl_name in self.controller_configs:
                 module_name = keys.get_module_name(ctrl_name)
-                module = importlib.import_module("antlion.controllers.%s" %
-                                                 module_name)
+                module = importlib.import_module("antlion.controllers.%s" % module_name)
                 builtin_controllers.append(module)
         return builtin_controllers
 
@@ -224,7 +220,7 @@
         without needing to make changes.
         """
         for key, value in self.user_params.items():
-            if key.endswith('files') and isinstance(value, dict):
+            if key.endswith("files") and isinstance(value, dict):
                 new_user_params = dict(value)
                 new_user_params.update(self.user_params)
                 self.user_params = new_user_params
@@ -243,15 +239,12 @@
         Returns:
             A string corresponding to the module's name.
         """
-        if hasattr(a_module, 'ACTS_CONTROLLER_REFERENCE_NAME'):
+        if hasattr(a_module, "ACTS_CONTROLLER_REFERENCE_NAME"):
             return a_module.ACTS_CONTROLLER_REFERENCE_NAME
         else:
-            return a_module.__name__.split('.')[-1]
+            return a_module.__name__.split(".")[-1]
 
-    def register_controller(self,
-                            controller_module,
-                            required=True,
-                            builtin=False):
+    def register_controller(self, controller_module, required=True, builtin=False):
         """Registers an ACTS controller module for a test class. Invokes Mobly's
         implementation of register_controller.
 
@@ -318,7 +311,8 @@
 
         # Get controller objects from Mobly's register_controller
         controllers = self._controller_manager.register_controller(
-            controller_module, required=required)
+            controller_module, required=required
+        )
         if not controllers:
             return None
 
@@ -326,8 +320,7 @@
         # Implementation of "get_info" is optional for a controller module.
         if hasattr(controller_module, "get_info"):
             controller_info = controller_module.get_info(controllers)
-            self.log.info("Controller %s: %s", module_config_name,
-                          controller_info)
+            self.log.info("Controller %s: %s", module_config_name, controller_info)
 
         if builtin:
             setattr(self, module_ref_name, controllers)
@@ -359,7 +352,7 @@
 
         # Skip the test if the consecutive test case failure limit is reached.
         if self.consecutive_failures == self.consecutive_failure_limit:
-            raise signals.TestError('Consecutive test failure')
+            raise signals.TestError("Consecutive test failure")
 
         return self.setup_test()
 
@@ -379,7 +372,7 @@
         """Proxy function to guarantee the base implementation of teardown_test
         is called.
         """
-        self.log.debug('Tearing down test %s' % test_name)
+        self.log.debug("Tearing down test %s" % test_name)
         self.teardown_test()
 
     def _on_fail(self, record):
@@ -502,8 +495,11 @@
         except signals.TestAbortAll:
             raise
         except Exception as e:
-            self.log.exception("Exception happened when executing %s for %s.",
-                               func.__name__, self.current_test_name)
+            self.log.exception(
+                "Exception happened when executing %s for %s.",
+                func.__name__,
+                self.current_test_name,
+            )
             tr_record.add_error(func.__name__, e)
 
     def exec_one_testcase(self, test_name, test_func):
@@ -527,8 +523,8 @@
         self.log.info("%s %s", TEST_CASE_TOKEN, test_name)
 
         # Enable test retry if specified in the ACTS config
-        retry_tests = self.user_params.get('retry_tests', [])
-        full_test_name = '%s.%s' % (class_name, self.test_name)
+        retry_tests = self.user_params.get("retry_tests", [])
+        full_test_name = "%s.%s" % (class_name, self.test_name)
         if any(name in retry_tests for name in [class_name, full_test_name]):
             test_func = self.get_func_with_retry(test_func)
 
@@ -537,8 +533,9 @@
         try:
             try:
                 ret = self._setup_test(self.test_name)
-                asserts.assert_true(ret is not False,
-                                    "Setup for %s failed." % test_name)
+                asserts.assert_true(
+                    ret is not False, "Setup for %s failed." % test_name
+                )
                 verdict = test_func()
             finally:
                 try:
@@ -551,7 +548,8 @@
         except (signals.TestFailure, AssertionError) as e:
             test_signal = e
             if self.user_params.get(
-                    keys.Config.key_test_failure_tracebacks.value, False):
+                keys.Config.key_test_failure_tracebacks.value, False
+            ):
                 self.log.exception(e)
             tr_record.test_fail(e)
         except signals.TestSkip as e:
@@ -594,11 +592,11 @@
                     self._exec_procedure_func(self._on_fail, tr_record)
             finally:
                 self.results.add_record(tr_record)
-                self.summary_writer.dump(tr_record.to_dict(),
-                                         records.TestSummaryEntryType.RECORD)
+                self.summary_writer.dump(
+                    tr_record.to_dict(), records.TestSummaryEntryType.RECORD
+                )
                 self.current_test_name = None
-                event_bus.post(
-                    TestCaseEndEvent(self, self.test_name, test_signal))
+                event_bus.post(TestCaseEndEvent(self, self.test_name, test_signal))
 
     def get_func_with_retry(self, func, attempts=2):
         """Returns a wrapped test method that re-runs after failure. Return test
@@ -626,23 +624,25 @@
                     return func(*args, **kwargs)
                 except exceptions as e:
                     retry = True
-                    msg = 'Failure on attempt %d: %s' % (i + 1, e.details)
+                    msg = "Failure on attempt %d: %s" % (i + 1, e.details)
                     self.log.warning(msg)
                     error_msgs.append(msg)
                     if e.extras:
-                        extras['Attempt %d' % (i + 1)] = e.extras
-            raise signals.TestFailure('\n'.join(error_msgs), extras)
+                        extras["Attempt %d" % (i + 1)] = e.extras
+            raise signals.TestFailure("\n".join(error_msgs), extras)
 
         return wrapper
 
-    def run_generated_testcases(self,
-                                test_func,
-                                settings,
-                                args=None,
-                                kwargs=None,
-                                tag="",
-                                name_func=None,
-                                format_args=False):
+    def run_generated_testcases(
+        self,
+        test_func,
+        settings,
+        args=None,
+        kwargs=None,
+        tag="",
+        name_func=None,
+        format_args=False,
+    ):
         """Deprecated. Please use pre_run and generate_tests.
 
         Generated test cases are not written down as functions, but as a list
@@ -681,27 +681,31 @@
                 try:
                     test_name = name_func(setting, *args, **kwargs)
                 except:
-                    self.log.exception(("Failed to get test name from "
-                                        "test_func. Fall back to default %s"),
-                                       test_name)
+                    self.log.exception(
+                        (
+                            "Failed to get test name from "
+                            "test_func. Fall back to default %s"
+                        ),
+                        test_name,
+                    )
 
             self.results.requested.append(test_name)
 
             if len(test_name) > utils.MAX_FILENAME_LEN:
-                test_name = test_name[:utils.MAX_FILENAME_LEN]
+                test_name = test_name[: utils.MAX_FILENAME_LEN]
 
             previous_success_cnt = len(self.results.passed)
 
             if format_args:
                 self.exec_one_testcase(
                     test_name,
-                    functools.partial(test_func, *(args + (setting, )),
-                                      **kwargs))
+                    functools.partial(test_func, *(args + (setting,)), **kwargs),
+                )
             else:
                 self.exec_one_testcase(
                     test_name,
-                    functools.partial(test_func, *((setting, ) + args),
-                                      **kwargs))
+                    functools.partial(test_func, *((setting,) + args), **kwargs),
+                )
 
             if len(self.results.passed) - previous_success_cnt != 1:
                 failed_settings.append(setting)
@@ -727,11 +731,12 @@
         except signals.TestAbortAll:
             raise
         except:
-            self.log.exception("Exception happened when executing %s in %s.",
-                               func.__name__, self.TAG)
+            self.log.exception(
+                "Exception happened when executing %s in %s.", func.__name__, self.TAG
+            )
             return False
 
-    def _block_all_test_cases(self, tests, reason='Failed class setup'):
+    def _block_all_test_cases(self, tests, reason="Failed class setup"):
         """
         Block all passed in test cases.
         Args:
@@ -743,12 +748,13 @@
             signal = signals.TestError(reason)
             record = records.TestResultRecord(test_name, self.TAG)
             record.test_begin()
-            if hasattr(test_func, 'gather'):
+            if hasattr(test_func, "gather"):
                 signal.extras = test_func.gather()
             record.test_error(signal)
             self.results.add_record(record)
-            self.summary_writer.dump(record.to_dict(),
-                                     records.TestSummaryEntryType.RECORD)
+            self.summary_writer.dump(
+                record.to_dict(), records.TestSummaryEntryType.RECORD
+            )
             self._on_skip(record)
 
     def run(self, test_names=None):
@@ -788,14 +794,18 @@
             matches = []
             for test_name in test_names:
                 for valid_test in valid_tests:
-                    if (fnmatch.fnmatch(valid_test, test_name)
-                            and valid_test not in matches):
+                    if (
+                        fnmatch.fnmatch(valid_test, test_name)
+                        and valid_test not in matches
+                    ):
                         matches.append(valid_test)
         else:
             matches = valid_tests
         self.results.requested = matches
-        self.summary_writer.dump(self.results.requested_test_names_dict(),
-                                 records.TestSummaryEntryType.TEST_NAME_LIST)
+        self.summary_writer.dump(
+            self.results.requested_test_names_dict(),
+            records.TestSummaryEntryType.TEST_NAME_LIST,
+        )
         tests = self._get_test_methods(matches)
 
         # Setup for the class.
@@ -806,7 +816,7 @@
                 self._block_all_test_cases(tests)
                 setup_fail = True
         except signals.TestAbortClass:
-            self.log.exception('Test class %s aborted' % self.TAG)
+            self.log.exception("Test class %s aborted" % self.TAG)
             setup_fail = True
         except Exception as e:
             self.log.exception("Failed to setup %s.", self.TAG)
@@ -814,17 +824,21 @@
             setup_fail = True
         if setup_fail:
             self._exec_func(self._teardown_class)
-            self.log.info("Summary for test class %s: %s", self.TAG,
-                          self.results.summary_str())
+            self.log.info(
+                "Summary for test class %s: %s", self.TAG, self.results.summary_str()
+            )
             return self.results
 
         # Run tests in order.
         test_case_iterations = self.user_params.get(
-            keys.Config.key_test_case_iterations.value, 1)
-        if any([
+            keys.Config.key_test_case_iterations.value, 1
+        )
+        if any(
+            [
                 substr in self.__class__.__name__
-                for substr in ['Preflight', 'Postflight']
-        ]):
+                for substr in ["Preflight", "Postflight"]
+            ]
+        ):
             test_case_iterations = 1
         try:
             for test_name, test_func in tests:
@@ -832,7 +846,7 @@
                     self.exec_one_testcase(test_name, test_func)
             return self.results
         except signals.TestAbortClass:
-            self.log.exception('Test class %s aborted' % self.TAG)
+            self.log.exception("Test class %s aborted" % self.TAG)
             return self.results
         except signals.TestAbortAll as e:
             # Piggy-back test results on this exception object so we don't lose
@@ -841,8 +855,9 @@
             raise e
         finally:
             self._exec_func(self._teardown_class)
-            self.log.info("Summary for test class %s: %s", self.TAG,
-                          self.results.summary_str())
+            self.log.info(
+                "Summary for test class %s: %s", self.TAG, self.results.summary_str()
+            )
 
     def _ad_take_bugreport(self, ad, test_name, begin_time):
         for i in range(3):
@@ -863,15 +878,17 @@
             try:
                 ad.get_qxdm_logs(test_name, qxdm_begin_time)
             except Exception as e:
-                ad.log.error("Failed to get QXDM log for %s with error %s",
-                             test_name, e)
+                ad.log.error(
+                    "Failed to get QXDM log for %s with error %s", test_name, e
+                )
                 result = False
 
         try:
             ad.check_crash_report(test_name, begin_time, log_crash_report=True)
         except Exception as e:
-            ad.log.error("Failed to check crash report for %s with error %s",
-                         test_name, e)
+            ad.log.error(
+                "Failed to check crash report for %s with error %s", test_name, e
+            )
             result = False
         return result
 
@@ -890,15 +907,13 @@
         # problematic tests, we skip bugreport and other failure artifact
         # creation.
         class_name = self.__class__.__name__
-        quiet_tests = self.user_params.get('quiet_tests', [])
+        quiet_tests = self.user_params.get("quiet_tests", [])
         if class_name in quiet_tests:
-            self.log.info(
-                "Skipping bug report, as directed for this test class.")
+            self.log.info("Skipping bug report, as directed for this test class.")
             return True
-        full_test_name = '%s.%s' % (class_name, test_name)
+        full_test_name = "%s.%s" % (class_name, test_name)
         if full_test_name in quiet_tests:
-            self.log.info(
-                "Skipping bug report, as directed for this test case.")
+            self.log.info("Skipping bug report, as directed for this test case.")
             return True
 
         # Once we hit a certain log path size, it's not going to get smaller.
@@ -907,7 +922,8 @@
             return True
         try:
             max_log_size = int(
-                self.user_params.get("soft_output_size_limit") or "invalid")
+                self.user_params.get("soft_output_size_limit") or "invalid"
+            )
             log_path = getattr(logging, "log_path", None)
             if log_path:
                 curr_log_size = utils.get_directory_size(log_path)
@@ -926,10 +942,9 @@
             return
 
         executor = ThreadPoolExecutor(max_workers=10)
-        for ad in getattr(self, 'android_devices', []):
+        for ad in getattr(self, "android_devices", []):
             executor.submit(self._ad_take_bugreport, ad, test_name, begin_time)
-            executor.submit(self._ad_take_extra_logs, ad, test_name,
-                            begin_time)
+            executor.submit(self._ad_take_extra_logs, ad, test_name, begin_time)
         executor.shutdown()
 
     def _reboot_device(self, ad):
@@ -937,24 +952,23 @@
         ad = ad.reboot()
 
     def _cleanup_logger_sessions(self):
-        for (mylogger, session) in self.logger_sessions:
-            self.log.info("Resetting a diagnostic session %s, %s", mylogger,
-                          session)
+        for mylogger, session in self.logger_sessions:
+            self.log.info("Resetting a diagnostic session %s, %s", mylogger, session)
             mylogger.reset()
         self.logger_sessions = []
 
     def _pull_diag_logs(self, test_name, begin_time):
-        for (mylogger, session) in self.logger_sessions:
+        for mylogger, session in self.logger_sessions:
             self.log.info("Pulling diagnostic session %s", mylogger)
             mylogger.stop(session)
             diag_path = os.path.join(
-                self.log_path, logger.epoch_to_log_line_timestamp(begin_time))
+                self.log_path, logger.epoch_to_log_line_timestamp(begin_time)
+            )
             os.makedirs(diag_path, exist_ok=True)
             mylogger.pull(session, diag_path)
 
     def register_test_class_event_subscriptions(self):
-        self.class_subscriptions = subscription_bundle.create_from_instance(
-            self)
+        self.class_subscriptions = subscription_bundle.create_from_instance(self)
         self.class_subscriptions.register()
 
     def unregister_test_class_event_subscriptions(self):
diff --git a/src/antlion/bin/act.py b/src/antlion/bin/act.py
index 81d0452..2f78645 100755
--- a/src/antlion/bin/act.py
+++ b/src/antlion/bin/act.py
@@ -54,8 +54,7 @@
     except signals.TestAbortAll:
         return True
     except:
-        print("Exception when executing %s, iteration %s." %
-              (runner.testbed_name, i))
+        print("Exception when executing %s, iteration %s." % (runner.testbed_name, i))
         print(traceback.format_exc())
     finally:
         runner.stop()
@@ -109,8 +108,10 @@
             ret = _run_test(c, test_identifiers, repeat)
             ok = ok and ret
         except Exception as e:
-            print("Exception occurred when executing test bed %s. %s" %
-                  (c.testbed_name, e))
+            print(
+                "Exception occurred when executing test bed %s. %s"
+                % (c.testbed_name, e)
+            )
     return ok
 
 
@@ -122,63 +123,80 @@
     functions and acts.test_runner.execute_one_test_class.
     """
     parser = argparse.ArgumentParser(
-        description=("Specify tests to run. If nothing specified, "
-                     "run all test cases found."))
-    parser.add_argument('-c',
-                        '--config',
-                        type=str,
-                        required=True,
-                        metavar="<PATH>",
-                        help="Path to the test configuration file.")
+        description=(
+            "Specify tests to run. If nothing specified, " "run all test cases found."
+        )
+    )
     parser.add_argument(
-        '-ci',
-        '--campaign_iterations',
+        "-c",
+        "--config",
+        type=str,
+        required=True,
+        metavar="<PATH>",
+        help="Path to the test configuration file.",
+    )
+    parser.add_argument(
+        "-ci",
+        "--campaign_iterations",
         metavar="<CAMPAIGN_ITERATIONS>",
-        nargs='?',
+        nargs="?",
         type=int,
         const=1,
         default=1,
-        help="Number of times to run the campaign or a group of test cases.")
-    parser.add_argument('-tb',
-                        '--testbed',
-                        nargs='+',
-                        type=str,
-                        metavar="[<TEST BED NAME1> <TEST BED NAME2> ...]",
-                        help="Specify which test beds to run tests on.")
-    parser.add_argument('-lp',
-                        '--logpath',
-                        type=str,
-                        metavar="<PATH>",
-                        help="Root path under which all logs will be placed.")
+        help="Number of times to run the campaign or a group of test cases.",
+    )
     parser.add_argument(
-        '-tp',
-        '--testpaths',
-        nargs='*',
+        "-tb",
+        "--testbed",
+        nargs="+",
+        type=str,
+        metavar="[<TEST BED NAME1> <TEST BED NAME2> ...]",
+        help="Specify which test beds to run tests on.",
+    )
+    parser.add_argument(
+        "-lp",
+        "--logpath",
+        type=str,
+        metavar="<PATH>",
+        help="Root path under which all logs will be placed.",
+    )
+    parser.add_argument(
+        "-tp",
+        "--testpaths",
+        nargs="*",
         type=str,
         metavar="<PATH> <PATH>",
-        help="One or more non-recursive test class search paths.")
+        help="One or more non-recursive test class search paths.",
+    )
 
     group = parser.add_mutually_exclusive_group(required=True)
-    group.add_argument('-tc',
-                       '--testclass',
-                       nargs='+',
-                       type=str,
-                       metavar="[TestClass1 TestClass2:test_xxx ...]",
-                       help="A list of test classes/cases to run.")
     group.add_argument(
-        '-tf',
-        '--testfile',
+        "-tc",
+        "--testclass",
+        nargs="+",
+        type=str,
+        metavar="[TestClass1 TestClass2:test_xxx ...]",
+        help="A list of test classes/cases to run.",
+    )
+    group.add_argument(
+        "-tf",
+        "--testfile",
         nargs=1,
         type=str,
         metavar="<PATH>",
-        help=("Path to a file containing a comma delimited list of test "
-              "classes to run."))
-    parser.add_argument('-ti',
-                        '--test_case_iterations',
-                        metavar="<TEST_CASE_ITERATIONS>",
-                        nargs='?',
-                        type=int,
-                        help="Number of times to run every test case.")
+        help=(
+            "Path to a file containing a comma delimited list of test "
+            "classes to run."
+        ),
+    )
+    parser.add_argument(
+        "-ti",
+        "--test_case_iterations",
+        metavar="<TEST_CASE_ITERATIONS>",
+        nargs="?",
+        type=int,
+        help="Number of times to run every test case.",
+    )
 
     args = parser.parse_args(sys.argv[1:])
     test_list = None
@@ -186,12 +204,18 @@
         test_list = config_parser.parse_test_file(args.testfile[0])
     elif args.testclass:
         test_list = args.testclass
-    if re.search(r'\.ya?ml$', args.config):
-        parsed_configs = mobly_config_parser.load_test_config_file(
-            args.config, args.testbed)
-    else:
-        parsed_configs = config_parser.load_test_config_file(
-            args.config, args.testbed)
+
+    config = args.config
+
+    if config.endswith(".json"):
+        print(
+            "DEPRECATION NOTICE: Converting ACTS JSON to Mobly YAML. ACTS is "
+            + "deprecated. Support will be removed in the next release."
+        )
+        config = utils.acts_json_to_mobly_yaml(config)
+        print(f"Wrote YAML config to {config}")
+
+    parsed_configs = mobly_config_parser.load_test_config_file(config, args.testbed)
 
     for test_run_config in parsed_configs:
         if args.testpaths:
@@ -205,10 +229,12 @@
 
         # Sets the --testpaths flag to the default test directory if left unset.
         testpath_key = keys.Config.key_test_paths.value
-        if (testpath_key not in test_run_config.controller_configs
-                or test_run_config.controller_configs[testpath_key] is None):
+        if (
+            testpath_key not in test_run_config.controller_configs
+            or test_run_config.controller_configs[testpath_key] is None
+        ):
             test_run_config.controller_configs[testpath_key] = [
-                os.path.join(os.path.dirname(__file__), '../tests/'),
+                os.path.join(os.path.dirname(__file__), "../tests/"),
             ]
 
         for path in test_run_config.controller_configs[testpath_key]:
@@ -217,15 +243,25 @@
         # TODO(markdr): Find a way to merge this with the validation done in
         # Mobly's load_test_config_file.
         if not test_run_config.log_path:
-            raise ActsConfigError("Required key %s missing in test config." %
-                                  keys.Config.key_log_path.value)
+            raise ActsConfigError(
+                "Required key %s missing in test config."
+                % keys.Config.key_log_path.value
+            )
         test_run_config.log_path = utils.abs_path(test_run_config.log_path)
 
     # Prepare args for test runs
     test_identifiers = config_parser.parse_test_list(test_list)
 
-    exec_result = _run_tests(parsed_configs, test_identifiers,
-                             args.campaign_iterations)
+    print(
+        "\n\nDEPRECATION NOTICE: Running antlion tests with act.py is "
+        "deprecated and will be removed in the next release. Please migrate "
+        "by using Mobly YAML configs and executing the test class directly:\n\n"
+    )
+    for test_class, _ in test_identifiers:
+        print(f"   python {test_class}.py -c {config}")
+    print("\n")
+
+    exec_result = _run_tests(parsed_configs, test_identifiers, args.campaign_iterations)
     if exec_result is False:
         # return 1 upon test failure.
         sys.exit(1)
diff --git a/src/antlion/libs/utils/__init__.py b/src/antlion/capabilities/__init__.py
similarity index 100%
rename from src/antlion/libs/utils/__init__.py
rename to src/antlion/capabilities/__init__.py
diff --git a/src/antlion/capabilities/ssh.py b/src/antlion/capabilities/ssh.py
new file mode 100644
index 0000000..eeb1e16
--- /dev/null
+++ b/src/antlion/capabilities/ssh.py
@@ -0,0 +1,377 @@
+#!/usr/bin/env python3
+#
+# Copyright 2023 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import subprocess
+import time
+
+from dataclasses import dataclass
+from typing import List, Union, BinaryIO
+
+from antlion import logger
+from antlion import signals
+from antlion.net import wait_for_port
+
+DEFAULT_SSH_PORT: int = 22
+DEFAULT_SSH_TIMEOUT_SEC: int = 60
+DEFAULT_SSH_CONNECT_TIMEOUT_SEC: int = 90
+DEFAULT_SSH_SERVER_ALIVE_INTERVAL: int = 30
+# The default package repository for all components.
+
+
+class SSHResult:
+    """Result of an SSH command."""
+
+    def __init__(
+        self, process: Union[subprocess.CompletedProcess, subprocess.CalledProcessError]
+    ) -> None:
+        self._raw_stdout = process.stdout
+        self._stderr = process.stderr.decode("utf-8", errors="replace")
+        self._exit_status: int = process.returncode
+
+    def __str__(self):
+        if self.exit_status == 0:
+            return self.stdout
+        return f'status {self.exit_status}, stdout: "{self.stdout}", stderr: "{self.stderr}"'
+
+    @property
+    def stdout(self) -> str:
+        if not hasattr(self, "_stdout"):
+            self._stdout = self._raw_stdout.decode("utf-8", errors="replace")
+        return self._stdout
+
+    @property
+    def stderr(self) -> str:
+        return self._stderr
+
+    @property
+    def exit_status(self) -> int:
+        return self._exit_status
+
+    @property
+    def raw_stdout(self) -> bytes:
+        return self._raw_stdout
+
+
+class SSHError(signals.TestError):
+    """A SSH command returned with a non-zero status code."""
+
+    def __init__(self, command: str, result: SSHResult):
+        super().__init__(f'SSH command "{command}" unexpectedly returned {result}')
+        self.result = result
+
+
+class SSHTimeout(signals.TestError):
+    """A SSH command timed out."""
+
+    def __init__(self, err: subprocess.TimeoutExpired):
+        super().__init__(
+            f'SSH command "{err.cmd}" timed out after {err.timeout}s, '
+            f'stdout="{err.stdout}", stderr="{err.stderr}"'
+        )
+
+
+class SSHTransportError(signals.TestError):
+    """Failure to send an SSH command."""
+
+
+@dataclass
+class SSHConfig:
+    """SSH client config."""
+
+    # SSH flags. See ssh(1) for full details.
+    user: str
+    host_name: str
+    identity_file: str
+
+    ssh_binary: str = "ssh"
+    config_file: str = "/dev/null"
+    port: int = 22
+
+    # SSH options. See ssh_config(5) for full details.
+    connect_timeout: int = DEFAULT_SSH_CONNECT_TIMEOUT_SEC
+    server_alive_interval: int = DEFAULT_SSH_SERVER_ALIVE_INTERVAL
+    strict_host_key_checking: bool = False
+    user_known_hosts_file: str = "/dev/null"
+    log_level: str = "ERROR"
+
+    def full_command(self, command: str, force_tty: bool = False) -> List[str]:
+        """Generate the complete command to execute command over SSH.
+
+        Args:
+            command: The command to run over SSH
+            force_tty: Force pseudo-terminal allocation. This can be used to
+                execute arbitrary screen-based programs on a remote machine,
+                which can be very useful, e.g. when implementing menu services.
+
+        Returns:
+            Arguments composing the complete call to SSH.
+        """
+        optional_flags = []
+        if force_tty:
+            # Multiple -t options force tty allocation, even if ssh has no local
+            # tty. This is necessary for launching ssh with subprocess without
+            # shell=True.
+            optional_flags.append("-tt")
+
+        return (
+            [
+                self.ssh_binary,
+                # SSH flags
+                "-i",
+                self.identity_file,
+                "-F",
+                self.config_file,
+                "-p",
+                str(self.port),
+                # SSH configuration options
+                "-o",
+                f"ConnectTimeout={self.connect_timeout}",
+                "-o",
+                f"ServerAliveInterval={self.server_alive_interval}",
+                "-o",
+                f'StrictHostKeyChecking={"yes" if self.strict_host_key_checking else "no"}',
+                "-o",
+                f"UserKnownHostsFile={self.user_known_hosts_file}",
+                "-o",
+                f"LogLevel={self.log_level}",
+            ]
+            + optional_flags
+            + [f"{self.user}@{self.host_name}"]
+            + command.split()
+        )
+
+
+class SSHProvider:
+    """Device-specific provider for SSH clients."""
+
+    def __init__(self, config: SSHConfig) -> None:
+        """
+        Args:
+            config: SSH client config
+        """
+        logger_tag = f"ssh | {config.host_name}"
+        if config.port != DEFAULT_SSH_PORT:
+            logger_tag += f":{config.port}"
+
+        # Check if the private key exists
+
+        self.log = logger.create_tagged_trace_logger(logger_tag)
+        self.config = config
+
+        try:
+            self.wait_until_reachable()
+            self.log.info("sshd is reachable")
+        except Exception as e:
+            raise TimeoutError("sshd is unreachable") from e
+
+    def wait_until_reachable(self) -> None:
+        """Wait for the device to become reachable via SSH.
+
+        Raises:
+            TimeoutError: connect_timeout has expired without a successful SSH
+                connection to the device
+            SSHTransportError: SSH is available on the device but
+                connect_timeout has expired and SSH fails to run
+            SSHTimeout: SSH is available on the device but connect_timeout has
+                expired and SSH takes too long to run a command
+        """
+        timeout_sec = self.config.connect_timeout
+        timeout = time.time() + timeout_sec
+        wait_for_port(self.config.host_name, self.config.port, timeout_sec=timeout_sec)
+
+        while True:
+            try:
+                self._run("echo", timeout_sec, False, None)
+                return
+            except SSHTransportError as e:
+                # Repeat if necessary; _run() can exit prematurely by receiving
+                # SSH transport errors. These errors can be caused by sshd not
+                # being fully initialized yet.
+                if time.time() < timeout:
+                    continue
+                else:
+                    raise e
+
+    def wait_until_unreachable(
+        self, interval_sec: int = 1, timeout_sec: int = DEFAULT_SSH_CONNECT_TIMEOUT_SEC
+    ) -> None:
+        """Wait for the device to become unreachable via SSH.
+
+        Args:
+            interval_sec: Seconds to wait between unreachability attempts
+            timeout_sec: Seconds to wait until raising TimeoutError
+
+        Raises:
+            TimeoutError: when timeout_sec has expired without an unsuccessful
+                SSH connection to the device
+        """
+        timeout = time.time() + timeout_sec
+
+        while True:
+            try:
+                wait_for_port(
+                    self.config.host_name, self.config.port, timeout_sec=interval_sec
+                )
+            except TimeoutError:
+                return
+
+            if time.time() >= timeout:
+                raise TimeoutError(
+                    f"Connection to {self.config.host_name} is still reachable "
+                    f"after {timeout_sec}s"
+                )
+
+    def run(
+        self,
+        command: str,
+        timeout_sec: int = DEFAULT_SSH_TIMEOUT_SEC,
+        connect_retries: int = 3,
+        force_tty: bool = False,
+    ) -> SSHResult:
+        """Run a command on the device then exit.
+
+        Args:
+            command: String to send to the device.
+            timeout_sec: Seconds to wait for the command to complete.
+            connect_retries: Amount of times to retry connect on fail.
+            force_tty: Force pseudo-terminal allocation.
+
+        Raises:
+            SSHError: if the SSH command returns a non-zero status code
+            SSHTransportError: if SSH fails to run the command
+            SSHTimeout: if there is no response within timeout_sec
+
+        Returns:
+            SSHResults from the executed command.
+        """
+        return self._run_with_retry(
+            command, timeout_sec, connect_retries, force_tty, stdin=None
+        )
+
+    def _run_with_retry(
+        self,
+        command: str,
+        timeout_sec: int,
+        connect_retries: int,
+        force_tty: bool,
+        stdin: Union[BinaryIO, None],
+    ) -> SSHResult:
+        err: Exception = ValueError("connect_retries cannot be 0")
+        for i in range(0, connect_retries):
+            try:
+                return self._run(command, timeout_sec, force_tty, stdin)
+            except SSHTransportError as e:
+                err = e
+                self.log.warning(f"Connect failed: {e}")
+        raise err
+
+    def _run(
+        self, command: str, timeout_sec: int, force_tty: bool, stdin: Union[BinaryIO, None]
+    ) -> SSHResult:
+        full_command = self.config.full_command(command, force_tty)
+        self.log.debug(
+            f'Running "{command}" (full command: "{" ".join(full_command)}")'
+        )
+        try:
+            process = subprocess.run(
+                full_command,
+                capture_output=True,
+                timeout=timeout_sec,
+                check=True,
+                stdin=stdin,
+            )
+        except subprocess.CalledProcessError as e:
+            if e.returncode == 255:
+                stderr = e.stderr.decode("utf-8", errors="replace")
+                if (
+                    "Name or service not known" in stderr
+                    or "Host does not exist" in stderr
+                ):
+                    raise SSHTransportError(
+                        f"Hostname {self.config.host_name} cannot be resolved to an address"
+                    ) from e
+                if "Connection timed out" in stderr:
+                    raise SSHTransportError(
+                        f"Failed to establish a connection to {self.config.host_name} within {timeout_sec}s"
+                    ) from e
+                if "Connection refused" in stderr:
+                    raise SSHTransportError(
+                        f"Connection refused by {self.config.host_name}"
+                    ) from e
+
+            raise SSHError(command, SSHResult(e)) from e
+        except subprocess.TimeoutExpired as e:
+            raise SSHTimeout(e) from e
+
+        return SSHResult(process)
+
+    def upload_file(
+        self,
+        local_path: str,
+        remote_path: str,
+        timeout_sec: int = DEFAULT_SSH_TIMEOUT_SEC,
+        connect_retries: int = 3,
+    ) -> None:
+        """Upload a file to the device.
+
+        Args:
+            local_path: Path to the file to upload
+            remote_path: Path on the remote device to place the uploaded file.
+            timeout_sec: Seconds to wait for the command to complete.
+            connect_retries: Amount of times to retry connect on fail.
+
+        Raises:
+            SSHError: if the SSH upload returns a non-zero status code
+            SSHTransportError: if SSH fails to run the upload command
+            SSHTimeout: if there is no response within timeout_sec
+        """
+        # Context manager ensures the local file handle is always closed.
+        with open(local_path, "rb") as file:
+            self._run_with_retry(
+                f"cat > {remote_path}",
+                timeout_sec,
+                connect_retries,
+                force_tty=False, stdin=file,
+            )
+
+    def download_file(
+        self,
+        remote_path: str,
+        local_path: str,
+        timeout_sec: int = DEFAULT_SSH_TIMEOUT_SEC,
+        connect_retries: int = 3,
+    ) -> None:
+        """Download a file from the device.
+
+        Args:
+            remote_path: Path on the remote device to download.
+            local_path: Path on the host to place the downloaded file.
+            timeout_sec: Seconds to wait for the command to complete.
+            connect_retries: Amount of times to retry connect on fail.
+
+        Raises:
+            SSHError: if the SSH command returns a non-zero status code
+            SSHTransportError: if SSH fails to run the command
+            SSHTimeout: if there is no response within timeout_sec
+        """
+        # Fetch the remote file by running `cat` and capturing its stdout.
+        # Use raw bytes so binary file contents are not corrupted.
+        result = self._run_with_retry(
+            f"cat {remote_path}", timeout_sec, connect_retries, force_tty=False, stdin=None
+        )
+        # Write the captured bytes to the local destination.
+        with open(local_path, "wb") as file:
+            file.write(result.raw_stdout)
diff --git a/src/antlion/config_parser.py b/src/antlion/config_parser.py
index 0cfb308..7f202ff 100755
--- a/src/antlion/config_parser.py
+++ b/src/antlion/config_parser.py
@@ -23,12 +23,12 @@
 from antlion import utils
 
 # An environment variable defining the base location for ACTS logs.
-_ENV_ACTS_LOGPATH = 'ACTS_LOGPATH'
+_ENV_ACTS_LOGPATH = "ACTS_LOGPATH"
 # An environment variable that enables test case failures to log stack traces.
-_ENV_TEST_FAILURE_TRACEBACKS = 'ACTS_TEST_FAILURE_TRACEBACKS'
+_ENV_TEST_FAILURE_TRACEBACKS = "ACTS_TEST_FAILURE_TRACEBACKS"
 # An environment variable defining the test search paths for ACTS.
-_ENV_ACTS_TESTPATHS = 'ACTS_TESTPATHS'
-_PATH_SEPARATOR = ':'
+_ENV_ACTS_TESTPATHS = "ACTS_TESTPATHS"
+_PATH_SEPARATOR = ":"
 
 
 class ActsConfigError(Exception):
@@ -43,13 +43,11 @@
     for k in keys.Config.reserved_keys.value:
         # TODO(markdr): Remove this continue after merging this with the
         # validation done in Mobly's load_test_config_file.
-        if (k == keys.Config.key_test_paths.value
-                or k == keys.Config.key_log_path.value):
+        if k == keys.Config.key_test_paths.value or k == keys.Config.key_log_path.value:
             continue
 
         if k not in test_config:
-            raise ActsConfigError("Required key %s missing in test config." %
-                                  k)
+            raise ActsConfigError("Required key %s missing in test config." % k)
 
 
 def _validate_testbed_name(name):
@@ -70,58 +68,26 @@
         raise ActsConfigError("Test bed names have to be string.")
     for l in name:
         if l not in utils.valid_filename_chars:
-            raise ActsConfigError(
-                "Char '%s' is not allowed in test bed names." % l)
+            raise ActsConfigError("Char '%s' is not allowed in test bed names." % l)
 
 
-def _update_file_paths(config, config_path):
-    """ Checks if the path entries are valid.
-
-    If the file path is invalid, assume it is a relative path and append
-    that to the config file path.
-
-    Args:
-        config : the config object to verify.
-        config_path : The path to the config file, which can be used to
-                      generate absolute paths from relative paths in configs.
-
-    Raises:
-        If the file path is invalid, ActsConfigError is raised.
-    """
-    # Check the file_path_keys and update if it is a relative path.
-    for file_path_key in keys.Config.file_path_keys.value:
-        if file_path_key in config:
-            config_file = config[file_path_key]
-            if type(config_file) is str:
-                if not os.path.isfile(config_file):
-                    config_file = os.path.join(config_path, config_file)
-                if not os.path.isfile(config_file):
-                    raise ActsConfigError(
-                        "Unable to load config %s from test "
-                        "config file.", config_file)
-                config[file_path_key] = config_file
-
-
-def _validate_testbed_configs(testbed_configs, config_path):
+def _validate_testbed_configs(testbed_configs):
     """Validates the testbed configurations.
 
     Args:
         testbed_configs: A list of testbed configuration json objects.
-        config_path : The path to the config file, which can be used to
-                      generate absolute paths from relative paths in configs.
 
     Raises:
         If any part of the configuration is invalid, ActsConfigError is raised.
     """
     # Cross checks testbed configs for resource conflicts.
-    for name, config in testbed_configs.items():
-        _update_file_paths(config, config_path)
+    for name in testbed_configs:
         _validate_testbed_name(name)
 
 
 def gen_term_signal_handler(test_runners):
     def termination_sig_handler(signal_num, frame):
-        print('Received sigterm %s.' % signal_num)
+        print("Received sigterm %s." % signal_num)
         for t in test_runners:
             t.stop()
         sys.exit(1)
@@ -141,7 +107,7 @@
         name, the list of strings is a list of test case names. The list can be
         None.
     """
-    tokens = item.split(':')
+    tokens = item.split(":")
     if len(tokens) > 2:
         raise ActsConfigError("Syntax error in test specifier %s" % item)
     if len(tokens) == 1:
@@ -152,7 +118,7 @@
         # This should be considered a test class name followed by
         # a list of test case names.
         test_cls_name, test_case_names = tokens
-        clean_names = [elem.strip() for elem in test_case_names.split(',')]
+        clean_names = [elem.strip() for elem in test_case_names.split(",")]
         return test_cls_name, clean_names
 
 
@@ -206,44 +172,49 @@
             else:
                 raise ActsConfigError(
                     'Expected testbed named "%s", but none was found. Check '
-                    'if you have the correct testbed names.' % name)
+                    "if you have the correct testbed names." % name
+                )
         testbeds = tbs
 
-    if (keys.Config.key_log_path.value not in configs
-            and _ENV_ACTS_LOGPATH in os.environ):
-        print('Using environment log path: %s' %
-              (os.environ[_ENV_ACTS_LOGPATH]))
+    if (
+        keys.Config.key_log_path.value not in configs
+        and _ENV_ACTS_LOGPATH in os.environ
+    ):
+        print("Using environment log path: %s" % (os.environ[_ENV_ACTS_LOGPATH]))
         configs[keys.Config.key_log_path.value] = os.environ[_ENV_ACTS_LOGPATH]
-    if (keys.Config.key_test_paths.value not in configs
-            and _ENV_ACTS_TESTPATHS in os.environ):
-        print('Using environment test paths: %s' %
-              (os.environ[_ENV_ACTS_TESTPATHS]))
-        configs[keys.Config.key_test_paths.
-                value] = os.environ[_ENV_ACTS_TESTPATHS].split(_PATH_SEPARATOR)
-    if (keys.Config.key_test_failure_tracebacks not in configs
-            and _ENV_TEST_FAILURE_TRACEBACKS in os.environ):
-        configs[keys.Config.key_test_failure_tracebacks.
-                value] = os.environ[_ENV_TEST_FAILURE_TRACEBACKS]
+    if (
+        keys.Config.key_test_paths.value not in configs
+        and _ENV_ACTS_TESTPATHS in os.environ
+    ):
+        print("Using environment test paths: %s" % (os.environ[_ENV_ACTS_TESTPATHS]))
+        configs[keys.Config.key_test_paths.value] = os.environ[
+            _ENV_ACTS_TESTPATHS
+        ].split(_PATH_SEPARATOR)
+    if (
+        keys.Config.key_test_failure_tracebacks.value not in configs
+        and _ENV_TEST_FAILURE_TRACEBACKS in os.environ
+    ):
+        configs[keys.Config.key_test_failure_tracebacks.value] = os.environ[
+            _ENV_TEST_FAILURE_TRACEBACKS
+        ]
 
     # TODO: See if there is a better way to do this: b/29836695
     config_path, _ = os.path.split(utils.abs_path(test_config_path))
     configs[keys.Config.key_config_path.value] = config_path
     _validate_test_config(configs)
-    _validate_testbed_configs(testbeds, config_path)
+    _validate_testbed_configs(testbeds)
     # Unpack testbeds into separate json objects.
     configs.pop(keys.Config.key_testbed.value)
     test_run_configs = []
 
     for _, testbed in testbeds.items():
         test_run_config = mobly_config_parser.TestRunConfig()
-        test_run_config.testbed_name = testbed[
-            keys.Config.key_testbed_name.value]
+        test_run_config.testbed_name = testbed[keys.Config.key_testbed_name.value]
         test_run_config.controller_configs = testbed
         test_run_config.controller_configs[
-            keys.Config.key_test_paths.value] = configs.get(
-                keys.Config.key_test_paths.value, None)
-        test_run_config.log_path = configs.get(keys.Config.key_log_path.value,
-                                               None)
+            keys.Config.key_test_paths.value
+        ] = configs.get(keys.Config.key_test_paths.value, None)
+        test_run_config.log_path = configs.get(keys.Config.key_log_path.value, None)
         if test_run_config.log_path is not None:
             test_run_config.log_path = utils.abs_path(test_run_config.log_path)
 
@@ -266,13 +237,13 @@
     Returns:
         A list of strings, each is a test specifier.
     """
-    with open(fpath, 'r') as f:
+    with open(fpath, "r") as f:
         tf = []
         for line in f:
             line = line.strip()
             if not line:
                 continue
-            if len(tf) and (tf[-1].endswith(':') or tf[-1].endswith(',')):
+            if len(tf) and (tf[-1].endswith(":") or tf[-1].endswith(",")):
                 tf[-1] += line
             else:
                 tf.append(line)
diff --git a/src/antlion/context.py b/src/antlion/context.py
index 5fe1417..cfe9df8 100644
--- a/src/antlion/context.py
+++ b/src/antlion/context.py
@@ -48,7 +48,7 @@
     """
     if depth is None:
         return _contexts[-1]
-    return _contexts[min(depth, len(_contexts)-1)]
+    return _contexts[min(depth, len(_contexts) - 1)]
 
 
 def get_context_for_event(event):
@@ -67,7 +67,7 @@
         return _get_context_for_test_case_event(event)
     if isinstance(event, TestClassEvent):
         return _get_context_for_test_class_event(event)
-    raise TypeError('Unrecognized event type: %s %s', event, event.__class__)
+    raise TypeError("Unrecognized event type: %s %s", event, event.__class__)
 
 
 def _get_context_for_test_case_event(event):
@@ -180,7 +180,7 @@
         Args:
             log_name: The name of the logger.
             base_output_path: The base path of output files for this logger.
-            """
+        """
         cls._base_output_paths[log_name] = base_output_path
 
     def get_subcontext(self, log_name=None):
@@ -198,7 +198,7 @@
         Returns:
             The output path.
         """
-        return self._subcontexts.get(log_name, '')
+        return self._subcontexts.get(log_name, "")
 
     @classmethod
     def add_subcontext(cls, log_name, subcontext):
@@ -225,9 +225,11 @@
             The output path.
         """
 
-        path = os.path.join(self.get_base_output_path(log_name),
-                            self._get_default_context_dir(),
-                            self.get_subcontext(log_name))
+        path = os.path.join(
+            self.get_base_output_path(log_name),
+            self._get_default_context_dir(),
+            self.get_subcontext(log_name),
+        )
         os.makedirs(path, exist_ok=True)
         return path
 
@@ -251,8 +253,9 @@
             return logging.log_path
         except AttributeError as e:
             raise EnvironmentError(
-                'The ACTS logger has not been set up and'
-                ' "base_output_path" has not been set.') from e
+                "The ACTS logger has not been set up and"
+                ' "base_output_path" has not been set.'
+            ) from e
 
     def _get_default_context_dir(self):
         """Gets the default output directory for this context."""
@@ -264,14 +267,14 @@
 
     @property
     def identifier(self):
-        return 'root'
+        return "root"
 
     def _get_default_context_dir(self):
         """Gets the default output directory for this context.
 
         Logs at the root level context are placed directly in the base level
         directory, so no context-level path exists."""
-        return ''
+        return ""
 
 
 class TestClassContext(TestContext):
@@ -336,7 +339,7 @@
 
     @property
     def identifier(self):
-        return '%s.%s' % (self.test_class_name, self.test_case_name)
+        return "%s.%s" % (self.test_class_name, self.test_case_name)
 
     def _get_default_context_dir(self):
         """Gets the default output directory for this context.
@@ -345,9 +348,7 @@
         by the name of the test case. This is in line with the ACTS logger
         itself.
         """
-        return os.path.join(
-            self.test_class_name,
-            self.test_case_name)
+        return os.path.join(self.test_class_name, self.test_case_name)
 
 
 # stack for keeping track of the current test context
diff --git a/src/antlion/controllers/__init__.py b/src/antlion/controllers/__init__.py
index 640393c..e69de29 100644
--- a/src/antlion/controllers/__init__.py
+++ b/src/antlion/controllers/__init__.py
@@ -1,31 +0,0 @@
-"""Modules under antlion.controllers provide interfaces to hardware/software
-resources that ACTS manages.
-
-Top level controllers module are controller modules that need to be explicitly
-specified by users in test configuration files. Top level controller modules
-should have the following module level functions:
-
-def create(configs, logger):
-    '''Instantiates the controller class with the input configs.
-    Args:
-        configs: A list of dicts each representing config for one controller
-            object.
-        logger: The main logger used in the current test run.
-    Returns:
-        A list of controller objects.
-
-def destroy(objs):
-    '''Destroys a list of controller objects created by the "create" function
-    and releases all the resources.
-
-    Args:
-        objs: A list of controller objects created from this module.
-    '''
-"""
-"""This is a list of all the top level controller modules"""
-__all__ = [
-    "android_device", "attenuator", "bluetooth_pts_device", "monsoon",
-    "access_point", "iperf_server", "packet_sender", "arduino_wifi_dongle",
-    "packet_capture", "fuchsia_device", "pdu", "openwrt_ap", "tigertail",
-    "asus_axe11000_ap"
-]
diff --git a/src/antlion/controllers/abstract_inst.py b/src/antlion/controllers/abstract_inst.py
deleted file mode 100644
index d55c3a5..0000000
--- a/src/antlion/controllers/abstract_inst.py
+++ /dev/null
@@ -1,243 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Python module for Abstract Instrument Library."""
-
-import socket
-import requests
-from antlion import logger
-
-
-class SocketInstrumentError(Exception):
-    """Abstract Instrument Error Class, via Socket and SCPI."""
-
-    def __init__(self, error, command=None):
-        """Init method for Socket Instrument Error.
-
-        Args:
-            error: Exception error.
-            command: Additional information on command,
-                Type, Str.
-        """
-        super(SocketInstrumentError, self).__init__(error)
-        self._error_code = error
-        self._error_message = self._error_code
-        if command is not None:
-            self._error_message = 'Command {} returned the error: {}.'.format(
-                repr(command), repr(self._error_message))
-
-    def __str__(self):
-        return self._error_message
-
-
-class SocketInstrument(object):
-    """Abstract Instrument Class, via Socket and SCPI."""
-
-    def __init__(self, ip_addr, ip_port):
-        """Init method for Socket Instrument.
-
-        Args:
-            ip_addr: IP Address.
-                Type, str.
-            ip_port: TCPIP Port.
-                Type, str.
-        """
-        self._socket_timeout = 120
-        self._socket_buffer_size = 1024
-
-        self._ip_addr = ip_addr
-        self._ip_port = ip_port
-
-        self._escseq = '\n'
-        self._codefmt = 'utf-8'
-
-        self._logger = logger.create_tagged_trace_logger(
-            '%s:%s' % (self._ip_addr, self._ip_port))
-
-        self._socket = None
-
-    def _connect_socket(self):
-        """Init and Connect to socket."""
-        try:
-            self._socket = socket.create_connection(
-                (self._ip_addr, self._ip_port), timeout=self._socket_timeout)
-
-            infmsg = 'Opened Socket connection to {}:{} with handle {}.'.format(
-                repr(self._ip_addr), repr(self._ip_port), repr(self._socket))
-            self._logger.debug(infmsg)
-
-        except socket.timeout:
-            errmsg = 'Socket timeout while connecting to instrument.'
-            self._logger.exception(errmsg)
-            raise SocketInstrumentError(errmsg)
-
-        except socket.error:
-            errmsg = 'Socket error while connecting to instrument.'
-            self._logger.exception(errmsg)
-            raise SocketInstrumentError(errmsg)
-
-    def _send(self, cmd):
-        """Send command via Socket.
-
-        Args:
-            cmd: Command to send,
-                Type, Str.
-        """
-        if not self._socket:
-            self._logger.warning('Socket instrument is not connected')
-            self._connect_socket()
-
-        cmd_es = cmd + self._escseq
-
-        try:
-            self._socket.sendall(cmd_es.encode(self._codefmt))
-            self._logger.debug('Sent %r to %r:%r.', cmd, self._ip_addr,
-                               self._ip_port)
-
-        except socket.timeout:
-            errmsg = ('Socket timeout while sending command {} '
-                      'to instrument.').format(repr(cmd))
-            self._logger.exception(errmsg)
-            raise SocketInstrumentError(errmsg)
-
-        except socket.error:
-            errmsg = ('Socket error while sending command {} '
-                      'to instrument.').format(repr(cmd))
-            self._logger.exception(errmsg)
-            raise SocketInstrumentError(errmsg)
-
-        except Exception as err:
-            errmsg = ('Error {} while sending command {} '
-                      'to instrument.').format(repr(cmd), repr(err))
-            self._logger.exception(errmsg)
-            raise
-
-    def _recv(self):
-        """Receive response via Socket.
-
-        Returns:
-            resp: Response from Instrument via Socket,
-                Type, Str.
-        """
-        if not self._socket:
-            self._logger.warning('Socket instrument is not connected')
-            self._connect_socket()
-
-        resp = ''
-
-        try:
-            while True:
-                resp_tmp = self._socket.recv(self._socket_buffer_size)
-                resp_tmp = resp_tmp.decode(self._codefmt)
-                resp += resp_tmp
-                if len(resp_tmp) < self._socket_buffer_size:
-                    break
-
-        except socket.timeout:
-            errmsg = 'Socket timeout while receiving response from instrument.'
-            self._logger.exception(errmsg)
-            raise SocketInstrumentError(errmsg)
-
-        except socket.error:
-            errmsg = 'Socket error while receiving response from instrument.'
-            self._logger.exception(errmsg)
-            raise SocketInstrumentError(errmsg)
-
-        except Exception as err:
-            errmsg = ('Error {} while receiving response '
-                      'from instrument').format(repr(err))
-            self._logger.exception(errmsg)
-            raise
-
-        resp = resp.rstrip(self._escseq)
-
-        self._logger.debug('Received %r from %r:%r.', resp, self._ip_addr,
-                           self._ip_port)
-
-        return resp
-
-    def _close_socket(self):
-        """Close Socket Instrument."""
-        if not self._socket:
-            return
-
-        try:
-            self._socket.shutdown(socket.SHUT_RDWR)
-            self._socket.close()
-            self._socket = None
-            self._logger.debug('Closed Socket Instrument %r:%r.',
-                               self._ip_addr, self._ip_port)
-
-        except Exception as err:
-            errmsg = 'Error {} while closing instrument.'.format(repr(err))
-            self._logger.exception(errmsg)
-            raise
-
-    def _query(self, cmd):
-        """query instrument via Socket.
-
-        Args:
-            cmd: Command to send,
-                Type, Str.
-
-        Returns:
-            resp: Response from Instrument via Socket,
-                Type, Str.
-        """
-        self._send(cmd + ';*OPC?')
-        resp = self._recv()
-        return resp
-
-
-class RequestInstrument(object):
-    """Abstract Instrument Class, via Request."""
-
-    def __init__(self, ip_addr):
-        """Init method for request instrument.
-
-        Args:
-            ip_addr: IP Address.
-                Type, Str.
-        """
-        self._request_timeout = 120
-        self._request_protocol = 'http'
-        self._ip_addr = ip_addr
-        self._escseq = '\r\n'
-
-        self._logger = logger.create_tagged_trace_logger(self._ip_addr)
-
-    def _query(self, cmd):
-        """query instrument via request.
-
-        Args:
-            cmd: Command to send,
-                Type, Str.
-
-        Returns:
-            resp: Response from Instrument via request,
-                Type, Str.
-        """
-        request_cmd = '{}://{}/{}'.format(self._request_protocol,
-                                          self._ip_addr, cmd)
-        resp_raw = requests.get(request_cmd, timeout=self._request_timeout)
-
-        resp = resp_raw.text
-        for char_del in self._escseq:
-            resp = resp.replace(char_del, '')
-
-        self._logger.debug('Sent %r to %r, and get %r.', cmd, self._ip_addr,
-                           resp)
-
-        return resp
diff --git a/src/antlion/controllers/access_point.py b/src/antlion/controllers/access_point.py
index d9116ce..91a241d 100755
--- a/src/antlion/controllers/access_point.py
+++ b/src/antlion/controllers/access_point.py
@@ -14,188 +14,60 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import collections
 import ipaddress
-import os
 import time
 
-from typing import FrozenSet, Set, TYPE_CHECKING
+from dataclasses import dataclass
+from typing import Any, Dict, FrozenSet, List, Optional, Set, Tuple
 
 from antlion import logger
 from antlion import utils
-from antlion.controllers import pdu
-from antlion.controllers.ap_lib import ap_get_interface
-from antlion.controllers.ap_lib import ap_iwconfig
-from antlion.controllers.ap_lib import bridge_interface
-from antlion.controllers.ap_lib import dhcp_config
-from antlion.controllers.ap_lib import dhcp_server
-from antlion.controllers.ap_lib import hostapd
-from antlion.controllers.ap_lib import hostapd_ap_preset
-from antlion.controllers.ap_lib import hostapd_config
+from antlion.capabilities.ssh import SSHConfig, SSHProvider
 from antlion.controllers.ap_lib import hostapd_constants
-from antlion.controllers.ap_lib import radvd
-from antlion.controllers.ap_lib import radvd_config
+from antlion.controllers.ap_lib.ap_get_interface import ApInterfaces
+from antlion.controllers.ap_lib.ap_iwconfig import ApIwconfig
+from antlion.controllers.ap_lib.bridge_interface import BridgeInterface
+from antlion.controllers.ap_lib.dhcp_config import DhcpConfig, Subnet
+from antlion.controllers.ap_lib.dhcp_server import DhcpServer, NoInterfaceError
 from antlion.controllers.ap_lib.extended_capabilities import ExtendedCapabilities
-from antlion.controllers.ap_lib.wireless_network_management import BssTransitionManagementRequest
+from antlion.controllers.ap_lib.hostapd import Hostapd
+from antlion.controllers.ap_lib.hostapd_ap_preset import create_ap_preset
+from antlion.controllers.ap_lib.hostapd_config import HostapdConfig
+from antlion.controllers.ap_lib.hostapd_security import Security
+from antlion.controllers.ap_lib.radvd import Radvd
+from antlion.controllers.ap_lib.radvd_config import RadvdConfig
+from antlion.controllers.ap_lib.wireless_network_management import (
+    BssTransitionManagementRequest,
+)
+from antlion.controllers.pdu import PduDevice, get_pdu_port_for_device
 from antlion.controllers.utils_lib.commands import ip
 from antlion.controllers.utils_lib.commands import route
 from antlion.controllers.utils_lib.ssh import connection
 from antlion.controllers.utils_lib.ssh import settings
 from antlion.libs.proc import job
 
-if TYPE_CHECKING:
-    from antlion.controllers.ap_lib.radvd import Radvd
-
-MOBLY_CONTROLLER_CONFIG_NAME = 'AccessPoint'
-ACTS_CONTROLLER_REFERENCE_NAME = 'access_points'
-_BRCTL = 'brctl'
-
-LIFETIME = 180
-PROC_NET_SNMP6 = '/proc/net/snmp6'
-SCAPY_INSTALL_COMMAND = 'sudo python setup.py install'
-RA_MULTICAST_ADDR = '33:33:00:00:00:01'
-RA_SCRIPT = 'sendra.py'
-
-
-def create(configs):
-    """Creates ap controllers from a json config.
-
-    Creates an ap controller from either a list, or a single
-    element. The element can either be just the hostname or a dictionary
-    containing the hostname and username of the ap to connect to over ssh.
-
-    Args:
-        The json configs that represent this controller.
-
-    Returns:
-        A new AccessPoint.
-    """
-    return [AccessPoint(c) for c in configs]
-
-
-def destroy(aps):
-    """Destroys a list of access points.
-
-    Args:
-        aps: The list of access points to destroy.
-    """
-    for ap in aps:
-        ap.close()
-
-
-def get_info(aps):
-    """Get information on a list of access points.
-
-    Args:
-        aps: A list of AccessPoints.
-
-    Returns:
-        A list of all aps hostname.
-    """
-    return [ap.ssh_settings.hostname for ap in aps]
-
-
-def setup_ap(
-        access_point,
-        profile_name,
-        channel,
-        ssid,
-        mode=None,
-        preamble=None,
-        beacon_interval=None,
-        dtim_period=None,
-        frag_threshold=None,
-        rts_threshold=None,
-        force_wmm=None,
-        hidden=False,
-        security=None,
-        pmf_support=None,
-        additional_ap_parameters=None,
-        password=None,
-        n_capabilities=None,
-        ac_capabilities=None,
-        vht_bandwidth=None,
-        wnm_features: FrozenSet[hostapd_constants.WnmFeature] = frozenset(),
-        setup_bridge=False,
-        is_ipv6_enabled=False,
-        is_nat_enabled=True):
-    """Creates a hostapd profile and runs it on an ap. This is a convenience
-    function that allows us to start an ap with a single function, without first
-    creating a hostapd config.
-
-    Args:
-        access_point: An ACTS access_point controller
-        profile_name: The profile name of one of the hostapd ap presets.
-        channel: What channel to set the AP to.
-        preamble: Whether to set short or long preamble (True or False)
-        beacon_interval: The beacon interval (int)
-        dtim_period: Length of dtim period (int)
-        frag_threshold: Fragmentation threshold (int)
-        rts_threshold: RTS threshold (int)
-        force_wmm: Enable WMM or not (True or False)
-        hidden: Advertise the SSID or not (True or False)
-        security: What security to enable.
-        pmf_support: int, whether pmf is not disabled, enabled, or required
-        additional_ap_parameters: Additional parameters to send the AP.
-        password: Password to connect to WLAN if necessary.
-        check_connectivity: Whether to check for internet connectivity.
-        wnm_features: WNM features to enable on the AP.
-        setup_bridge: Whether to bridge the LAN interface WLAN interface.
-            Only one WLAN interface can be bridged with the LAN interface
-            and none of the guest networks can be bridged.
-        is_ipv6_enabled: If True, start a IPv6 router advertisement daemon
-        is_nat_enabled: If True, start NAT on the AP to allow the DUT to be able
-            to access the internet if the WAN port is connected to the internet.
-
-    Returns:
-        An identifier for each ssid being started. These identifiers can be
-        used later by this controller to control the ap.
-
-    Raises:
-        Error: When the ap can't be brought up.
-    """
-    ap = hostapd_ap_preset.create_ap_preset(profile_name=profile_name,
-                                            iface_wlan_2g=access_point.wlan_2g,
-                                            iface_wlan_5g=access_point.wlan_5g,
-                                            channel=channel,
-                                            ssid=ssid,
-                                            mode=mode,
-                                            short_preamble=preamble,
-                                            beacon_interval=beacon_interval,
-                                            dtim_period=dtim_period,
-                                            frag_threshold=frag_threshold,
-                                            rts_threshold=rts_threshold,
-                                            force_wmm=force_wmm,
-                                            hidden=hidden,
-                                            bss_settings=[],
-                                            security=security,
-                                            pmf_support=pmf_support,
-                                            n_capabilities=n_capabilities,
-                                            ac_capabilities=ac_capabilities,
-                                            vht_bandwidth=vht_bandwidth,
-                                            wnm_features=wnm_features)
-    return access_point.start_ap(
-        hostapd_config=ap,
-        radvd_config=radvd_config.RadvdConfig() if is_ipv6_enabled else None,
-        setup_bridge=setup_bridge,
-        is_nat_enabled=is_nat_enabled,
-        additional_parameters=additional_ap_parameters)
+MOBLY_CONTROLLER_CONFIG_NAME = "AccessPoint"
+ACTS_CONTROLLER_REFERENCE_NAME = "access_points"
 
 
 class Error(Exception):
     """Error raised when there is a problem with the access point."""
 
 
-_ApInstance = collections.namedtuple('_ApInstance', ['hostapd', 'subnet'])
+@dataclass
+class _ApInstance:
+    hostapd: Hostapd
+    subnet: Subnet
+
 
 # These ranges were split this way since each physical radio can have up
 # to 8 SSIDs so for the 2GHz radio the DHCP range will be
 # 192.168.1 - 8 and the 5Ghz radio will be 192.168.9 - 16
-_AP_2GHZ_SUBNET_STR_DEFAULT = '192.168.1.0/24'
-_AP_5GHZ_SUBNET_STR_DEFAULT = '192.168.9.0/24'
+_AP_2GHZ_SUBNET_STR_DEFAULT = "192.168.1.0/24"
+_AP_5GHZ_SUBNET_STR_DEFAULT = "192.168.9.0/24"
 
 # The last digit of the ip for the bridge interface
-BRIDGE_IP_LAST = '100'
+BRIDGE_IP_LAST = "100"
 
 
 class AccessPoint(object):
@@ -207,48 +79,58 @@
         dhcp_settings: The dhcp server settings being used.
     """
 
-    def __init__(self, configs):
+    def __init__(self, configs: Dict[str, Any]) -> None:
         """
         Args:
             configs: configs for the access point from config file.
         """
-        self.ssh_settings = settings.from_config(configs['ssh_config'])
+        self.ssh_settings = settings.from_config(configs["ssh_config"])
         self.log = logger.create_logger(
-            lambda msg: f'[Access Point|{self.ssh_settings.hostname}] {msg}')
-        self.device_pdu_config = configs.get('PduDevice', None)
+            lambda msg: f"[Access Point|{self.ssh_settings.hostname}] {msg}"
+        )
+        self.device_pdu_config = configs.get("PduDevice", None)
         self.identifier = self.ssh_settings.hostname
 
-        if 'ap_subnet' in configs:
-            self._AP_2G_SUBNET_STR = configs['ap_subnet']['2g']
-            self._AP_5G_SUBNET_STR = configs['ap_subnet']['5g']
+        if "ap_subnet" in configs:
+            self._AP_2G_SUBNET_STR: str = configs["ap_subnet"]["2g"]
+            self._AP_5G_SUBNET_STR: str = configs["ap_subnet"]["5g"]
         else:
             self._AP_2G_SUBNET_STR = _AP_2GHZ_SUBNET_STR_DEFAULT
             self._AP_5G_SUBNET_STR = _AP_5GHZ_SUBNET_STR_DEFAULT
 
-        self._AP_2G_SUBNET = dhcp_config.Subnet(
-            ipaddress.ip_network(self._AP_2G_SUBNET_STR))
-        self._AP_5G_SUBNET = dhcp_config.Subnet(
-            ipaddress.ip_network(self._AP_5G_SUBNET_STR))
+        self._AP_2G_SUBNET = Subnet(ipaddress.ip_network(self._AP_2G_SUBNET_STR))
+        self._AP_5G_SUBNET = Subnet(ipaddress.ip_network(self._AP_5G_SUBNET_STR))
 
         self.ssh = connection.SshConnection(self.ssh_settings)
 
+        # TODO(http://b/278758876): Replace self.ssh with self.ssh_provider
+        self.ssh_provider = SSHProvider(
+            SSHConfig(
+                self.ssh_settings.username,
+                self.ssh_settings.hostname,
+                self.ssh_settings.identity_file,
+                port=self.ssh_settings.port,
+                ssh_binary=self.ssh_settings.executable,
+                connect_timeout=90,
+            )
+        )
+
         # Singleton utilities for running various commands.
         self._ip_cmd = ip.LinuxIpCommand(self.ssh)
         self._route_cmd = route.LinuxRouteCommand(self.ssh)
 
         # A map from network interface name to _ApInstance objects representing
         # the hostapd instance running against the interface.
-        self._aps = dict()
-        self._dhcp = None
-        self._dhcp_bss = dict()
-        self._radvd: Radvd = None
-        self.bridge = bridge_interface.BridgeInterface(self)
-        self.iwconfig = ap_iwconfig.ApIwconfig(self)
+        self._aps: Dict[str, _ApInstance] = dict()
+        self._dhcp: Optional[DhcpServer] = None
+        self._dhcp_bss: Dict[Any, Subnet] = dict()
+        self._radvd: Optional[Radvd] = None
+        self.bridge = BridgeInterface(self)
+        self.iwconfig = ApIwconfig(self)
 
         # Check to see if wan_interface is specified in acts_config for tests
         # isolated from the internet and set this override.
-        self.interfaces = ap_get_interface.ApInterfaces(
-            self, configs.get('wan_interface'))
+        self.interfaces = ApInterfaces(self, configs.get("wan_interface"))
 
         # Get needed interface names and initialize the unnecessary ones.
         self.wan = self.interfaces.get_wan_interface()
@@ -257,10 +139,9 @@
         self.wlan_5g = self.wlan[1]
         self.lan = self.interfaces.get_lan_interface()
         self._initial_ap()
-        self.scapy_install_path = None
         self.setup_bridge = False
 
-    def _initial_ap(self):
+    def _initial_ap(self) -> None:
         """Initial AP interfaces.
 
         Bring down hostapd if instance is running, bring down all bridge
@@ -271,32 +152,34 @@
         # interfaces need to be brought down as part of the AP initialization
         # process, otherwise test would fail.
         try:
-            self.ssh.run('stop wpasupplicant')
+            self.ssh.run("stop wpasupplicant")
         except job.Error:
-            self.log.info('No wpasupplicant running')
+            self.log.info("No wpasupplicant running")
         try:
-            self.ssh.run('stop hostapd')
+            self.ssh.run("stop hostapd")
         except job.Error:
-            self.log.info('No hostapd running')
+            self.log.info("No hostapd running")
         # Bring down all wireless interfaces
         for iface in self.wlan:
-            WLAN_DOWN = f'ip link set {iface} down'
+            WLAN_DOWN = f"ip link set {iface} down"
             self.ssh.run(WLAN_DOWN)
         # Bring down all bridge interfaces
         bridge_interfaces = self.interfaces.get_bridge_interface()
         if bridge_interfaces:
             for iface in bridge_interfaces:
-                BRIDGE_DOWN = f'ip link set {iface} down'
-                BRIDGE_DEL = f'brctl delbr {iface}'
+                BRIDGE_DOWN = f"ip link set {iface} down"
+                BRIDGE_DEL = f"brctl delbr {iface}"
                 self.ssh.run(BRIDGE_DOWN)
                 self.ssh.run(BRIDGE_DEL)
 
-    def start_ap(self,
-                 hostapd_config: hostapd_config.HostapdConfig,
-                 radvd_config=None,
-                 setup_bridge=False,
-                 is_nat_enabled=True,
-                 additional_parameters=None):
+    def start_ap(
+        self,
+        hostapd_config: HostapdConfig,
+        radvd_config: Optional[RadvdConfig] = None,
+        setup_bridge: bool = False,
+        is_nat_enabled: bool = True,
+        additional_parameters: Optional[Dict[str, Any]] = None,
+    ) -> List[Any]:
         """Starts as an ap using a set of configurations.
 
         This will start an ap on this host. To start an ap the controller
@@ -306,19 +189,17 @@
         for that subnet for any device that connects through that interface.
 
         Args:
-            hostapd_config: hostapd_config.HostapdConfig, The configurations
-                to use when starting up the ap.
-            radvd_config: radvd_config.RadvdConfig, The IPv6 configuration
-                to use when starting up the ap.
+            hostapd_config: The configurations to use when starting up the ap.
+            radvd_config: The IPv6 configuration to use when starting up the ap.
             setup_bridge: Whether to bridge the LAN interface WLAN interface.
                 Only one WLAN interface can be bridged with the LAN interface
                 and none of the guest networks can be bridged.
             is_nat_enabled: If True, start NAT on the AP to allow the DUT to be
                 able to access the internet if the WAN port is connected to the
                 internet.
-            additional_parameters: A dictionary of parameters that can sent
-                directly into the hostapd config file.  This can be used for
-                debugging and or adding one off parameters into the config.
+            additional_parameters: Parameters that can be sent directly into the
+                hostapd config file.  This can be used for debugging and or
+                adding one off parameters into the config.
 
         Returns:
             An identifier for each ssid being started. These identifiers can be
@@ -336,13 +217,13 @@
 
         # radvd requires the interface to have a IPv6 link-local address.
         if radvd_config:
-            self.ssh.run(f'sysctl -w net.ipv6.conf.{interface}.disable_ipv6=0')
-            self.ssh.run(f'sysctl -w net.ipv6.conf.{interface}.forwarding=1')
+            self.ssh.run(f"sysctl -w net.ipv6.conf.{interface}.disable_ipv6=0")
+            self.ssh.run(f"sysctl -w net.ipv6.conf.{interface}.forwarding=1")
 
         # In order to handle dhcp servers on any interface, the initiation of
         # the dhcp server must be done after the wlan interfaces are figured
         # out as opposed to being in __init__
-        self._dhcp = dhcp_server.DhcpServer(self.ssh, interface=interface)
+        self._dhcp = DhcpServer(self.ssh, interface=interface)
 
         # For multi bssid configurations the mac address
         # of the wireless interface needs to have enough space to mask out
@@ -352,16 +233,18 @@
         cmd = f"ip link show {interface}|grep ether|awk -F' ' '{{print $2}}'"
         interface_mac_orig = self.ssh.run(cmd)
         if interface == self.wlan_5g:
-            hostapd_config.bssid = interface_mac_orig.stdout[:-1] + '0'
+            hostapd_config.bssid = interface_mac_orig.stdout[:-1] + "0"
             last_octet = 1
         if interface == self.wlan_2g:
-            hostapd_config.bssid = interface_mac_orig.stdout[:-1] + '8'
+            hostapd_config.bssid = interface_mac_orig.stdout[:-1] + "8"
             last_octet = 9
         if interface in self._aps:
-            raise ValueError('No WiFi interface available for AP on '
-                             f'channel {hostapd_config.channel}')
+            raise ValueError(
+                "No WiFi interface available for AP on "
+                f"channel {hostapd_config.channel}"
+            )
 
-        apd = hostapd.Hostapd(self.ssh, interface)
+        apd = Hostapd(self.ssh, interface)
         new_instance = _ApInstance(hostapd=apd, subnet=subnet)
         self._aps[interface] = new_instance
 
@@ -374,7 +257,7 @@
         # on the AP, but not for traffic handled by the Linux networking stack
         # such as ping.
         if radvd_config:
-            self._route_cmd.add_route(interface, 'fe80::/64')
+            self._route_cmd.add_route(interface, "fe80::/64")
 
         self._dhcp_bss = dict()
         if hostapd_config.bss_lookup:
@@ -390,15 +273,17 @@
             for bss in hostapd_config.bss_lookup:
                 if interface_mac_orig:
                     hostapd_config.bss_lookup[bss].bssid = (
-                        interface_mac_orig.stdout[:-1] + hex(last_octet)[-1:])
+                        interface_mac_orig.stdout[:-1] + hex(last_octet)[-1:]
+                    )
                 self._route_cmd.clear_routes(net_interface=str(bss))
                 if interface is self.wlan_2g:
                     starting_ip_range = self._AP_2G_SUBNET_STR
                 else:
                     starting_ip_range = self._AP_5G_SUBNET_STR
-                a, b, c, d = starting_ip_range.split('.')
-                self._dhcp_bss[bss] = dhcp_config.Subnet(
-                    ipaddress.ip_network(f'{a}.{b}.{int(c) + counter}.{d}'))
+                a, b, c, d = starting_ip_range.split(".")
+                self._dhcp_bss[bss] = Subnet(
+                    ipaddress.ip_network(f"{a}.{b}.{int(c) + counter}.{d}")
+                )
                 counter = counter + 1
                 last_octet = last_octet + 1
 
@@ -407,9 +292,10 @@
         # The DHCP serer requires interfaces to have ips and routes before
         # the server will come up.
         interface_ip = ipaddress.ip_interface(
-            f'{subnet.router}/{subnet.network.netmask}')
+            f"{subnet.router}/{subnet.network.netmask}"
+        )
         if setup_bridge is True:
-            bridge_interface_name = 'eth_test'
+            bridge_interface_name = "eth_test"
             self.create_bridge(bridge_interface_name, [interface, self.lan])
             self._ip_cmd.set_ipv4_address(bridge_interface_name, interface_ip)
         else:
@@ -421,13 +307,13 @@
             # variables represent the interface name, k, and dhcp info, v.
             for k, v in self._dhcp_bss.items():
                 bss_interface_ip = ipaddress.ip_interface(
-                    f'{self._dhcp_bss[k].router}/{self._dhcp_bss[k].network.netmask}'
+                    f"{self._dhcp_bss[k].router}/{self._dhcp_bss[k].network.netmask}"
                 )
                 self._ip_cmd.set_ipv4_address(str(k), bss_interface_ip)
 
         # Restart the DHCP server with our updated list of subnets.
         configured_subnets = self.get_configured_subnets()
-        dhcp_conf = dhcp_config.DhcpConfig(subnets=configured_subnets)
+        dhcp_conf = DhcpConfig(subnets=configured_subnets)
         self.start_dhcp(dhcp_conf=dhcp_conf)
         if is_nat_enabled:
             self.start_nat()
@@ -437,7 +323,7 @@
             self.enable_forwarding()
         if radvd_config:
             radvd_interface = bridge_interface_name if setup_bridge else interface
-            self._radvd = radvd.Radvd(self.ssh, radvd_interface)
+            self._radvd = Radvd(self.ssh, radvd_interface)
             self._radvd.start(radvd_config)
         else:
             self._radvd = None
@@ -447,40 +333,40 @@
 
         return bss_interfaces
 
-    def get_configured_subnets(self):
+    def get_configured_subnets(self) -> List[Subnet]:
         """Get the list of configured subnets on the access point.
 
         This allows consumers of the access point objects create custom DHCP
         configs with the correct subnets.
 
-        Returns: a list of dhcp_config.Subnet objects
+        Returns: a list of Subnet objects
         """
         configured_subnets = [x.subnet for x in self._aps.values()]
         for k, v in self._dhcp_bss.items():
             configured_subnets.append(v)
         return configured_subnets
 
-    def start_dhcp(self, dhcp_conf):
+    def start_dhcp(self, dhcp_conf: DhcpConfig) -> None:
         """Start a DHCP server for the specified subnets.
 
         This allows consumers of the access point objects to control DHCP.
 
         Args:
-            dhcp_conf: A dhcp_config.DhcpConfig object.
+            dhcp_conf: A DhcpConfig object.
 
         Raises:
             Error: Raised when a dhcp server error is found.
         """
         self._dhcp.start(config=dhcp_conf)
 
-    def stop_dhcp(self):
+    def stop_dhcp(self) -> None:
         """Stop DHCP for this AP object.
 
         This allows consumers of the access point objects to control DHCP.
         """
         self._dhcp.stop()
 
-    def get_dhcp_logs(self):
+    def get_dhcp_logs(self) -> Optional[str]:
         """Get DHCP logs for this AP object.
 
         This allows consumers of the access point objects to validate DHCP
@@ -494,7 +380,7 @@
             return self._dhcp.get_logs()
         return None
 
-    def get_hostapd_logs(self):
+    def get_hostapd_logs(self) -> Dict[str, str]:
         """Get hostapd logs for all interfaces on AP object.
 
         This allows consumers of the access point objects to validate hostapd
@@ -504,11 +390,10 @@
         """
         hostapd_logs = dict()
         for identifier in self._aps:
-            hostapd_logs[identifier] = self._aps.get(
-                identifier).hostapd.pull_logs()
+            hostapd_logs[identifier] = self._aps.get(identifier).hostapd.pull_logs()
         return hostapd_logs
 
-    def get_radvd_logs(self):
+    def get_radvd_logs(self) -> Optional[str]:
         """Get radvd logs for this AP object.
 
         This allows consumers of the access point objects to validate radvd
@@ -522,16 +407,16 @@
             return self._radvd.pull_logs()
         return None
 
-    def enable_forwarding(self):
+    def enable_forwarding(self) -> None:
         """Enable IPv4 and IPv6 forwarding on the AP.
 
         When forwarding is enabled, the access point is able to route IP packets
         between devices in the same subnet.
         """
-        self.ssh.run('echo 1 > /proc/sys/net/ipv4/ip_forward')
-        self.ssh.run('echo 1 > /proc/sys/net/ipv6/conf/all/forwarding')
+        self.ssh.run("echo 1 > /proc/sys/net/ipv4/ip_forward")
+        self.ssh.run("echo 1 > /proc/sys/net/ipv6/conf/all/forwarding")
 
-    def start_nat(self):
+    def start_nat(self) -> None:
         """Start NAT on the AP.
 
         This allows consumers of the access point objects to enable NAT
@@ -544,11 +429,10 @@
         # the WAN and LAN/WLAN ports.  This means anyone connecting to the
         # WLAN/LAN ports will be able to access the internet if the WAN port
         # is connected to the internet.
-        self.ssh.run('iptables -t nat -F')
-        self.ssh.run(
-            f'iptables -t nat -A POSTROUTING -o {self.wan} -j MASQUERADE')
+        self.ssh.run("iptables -t nat -F")
+        self.ssh.run(f"iptables -t nat -A POSTROUTING -o {self.wan} -j MASQUERADE")
 
-    def stop_nat(self):
+    def stop_nat(self) -> None:
         """Stop NAT on the AP.
 
         This allows consumers of the access point objects to disable NAT on the
@@ -557,9 +441,9 @@
         Note that this is currently a global setting, since we don't have
         per-interface masquerade rules.
         """
-        self.ssh.run('iptables -t nat -F')
+        self.ssh.run("iptables -t nat -F")
 
-    def create_bridge(self, bridge_name, interfaces):
+    def create_bridge(self, bridge_name: str, interfaces: List[str]) -> None:
         """Create the specified bridge and bridge the specified interfaces.
 
         Args:
@@ -568,14 +452,14 @@
         """
 
         # Create the bridge interface
-        self.ssh.run(f'brctl addbr {bridge_name}')
+        self.ssh.run(f"brctl addbr {bridge_name}")
 
         for interface in interfaces:
-            self.ssh.run(f'brctl addif {bridge_name} {interface}')
+            self.ssh.run(f"brctl addif {bridge_name} {interface}")
 
-        self.ssh.run(f'ip link set {bridge_name} up')
+        self.ssh.run(f"ip link set {bridge_name} up")
 
-    def remove_bridge(self, bridge_name):
+    def remove_bridge(self, bridge_name: str) -> None:
         """Removes the specified bridge
 
         Args:
@@ -587,15 +471,15 @@
         #
         # Or if we're doing 2.4Ghz and 5Ghz SSIDs and we've already torn
         # down the bridge once, but we got called for each band.
-        result = self.ssh.run(f'brctl show {bridge_name}', ignore_status=True)
+        result = self.ssh.run(f"brctl show {bridge_name}", ignore_status=True)
 
         # If the bridge exists, we'll get an exit_status of 0, indicating
         # success, so we can continue and remove the bridge.
         if result.exit_status == 0:
-            self.ssh.run(f'ip link set {bridge_name} down')
-            self.ssh.run(f'brctl delbr {bridge_name}')
+            self.ssh.run(f"ip link set {bridge_name} down")
+            self.ssh.run(f"brctl delbr {bridge_name}")
 
-    def get_bssid_from_ssid(self, ssid, band):
+    def get_bssid_from_ssid(self, ssid: str, band: str) -> Optional[str]:
         """Gets the BSSID from a provided SSID
 
         Args:
@@ -611,8 +495,9 @@
         # Get the interface name associated with the given ssid.
         for interface in interfaces:
             iw_output = self.ssh.run(
-                f"iw dev {interface} info|grep ssid|awk -F' ' '{{print $2}}'")
-            if 'command failed: No such device' in iw_output.stderr:
+                f"iw dev {interface} info|grep ssid|awk -F' ' '{{print $2}}'"
+            )
+            if "command failed: No such device" in iw_output.stderr:
                 continue
             else:
                 # If the configured ssid is equal to the given ssid, we found
@@ -624,7 +509,7 @@
                     return iw_output.stdout
         return None
 
-    def stop_ap(self, identifier):
+    def stop_ap(self, identifier: str) -> None:
         """Stops a running ap on this controller.
 
         Args:
@@ -632,7 +517,7 @@
         """
 
         if identifier not in list(self._aps.keys()):
-            raise ValueError(f'Invalid identifier {identifier} given')
+            raise ValueError(f"Invalid identifier {identifier} given")
 
         instance = self._aps.get(identifier)
 
@@ -640,7 +525,7 @@
             self._radvd.stop()
         try:
             self.stop_dhcp()
-        except dhcp_server.NoInterfaceError:
+        except NoInterfaceError:
             pass
         self.stop_nat()
         instance.hostapd.stop()
@@ -650,18 +535,18 @@
         bridge_interfaces = self.interfaces.get_bridge_interface()
         if bridge_interfaces:
             for iface in bridge_interfaces:
-                BRIDGE_DOWN = f'ip link set {iface} down'
-                BRIDGE_DEL = f'brctl delbr {iface}'
+                BRIDGE_DOWN = f"ip link set {iface} down"
+                BRIDGE_DEL = f"brctl delbr {iface}"
                 self.ssh.run(BRIDGE_DOWN)
                 self.ssh.run(BRIDGE_DEL)
 
-    def stop_all_aps(self):
+    def stop_all_aps(self) -> None:
         """Stops all running aps on this device."""
 
         for ap in list(self._aps.keys()):
             self.stop_ap(ap)
 
-    def close(self):
+    def close(self) -> None:
         """Called to take down the entire access point.
 
         When called will stop all aps running on this host, shutdown the dhcp
@@ -672,7 +557,7 @@
             self.stop_all_aps()
         self.ssh.close()
 
-    def generate_bridge_configs(self, channel):
+    def generate_bridge_configs(self, channel: int) -> Tuple[str, Optional[str], str]:
         """Generate a list of configs for a bridge between LAN and WLAN.
 
         Args:
@@ -691,125 +576,65 @@
 
         iface_lan = self.lan
 
-        a, b, c, _ = subnet_str.strip('/24').split('.')
-        bridge_ip = f'{a}.{b}.{c}.{BRIDGE_IP_LAST}'
+        a, b, c, _ = subnet_str.strip("/24").split(".")
+        bridge_ip = f"{a}.{b}.{c}.{BRIDGE_IP_LAST}"
 
-        configs = (iface_wlan, iface_lan, bridge_ip)
+        return (iface_wlan, iface_lan, bridge_ip)
 
-        return configs
+    def ping(
+        self,
+        dest_ip: str,
+        count: int = 3,
+        interval: int = 1000,
+        timeout: int = 1000,
+        size: int = 56,
+        additional_ping_params: Optional[Any] = None,
+    ) -> Dict[str, Any]:
+        """Pings from AP to dest_ip, returns dict of ping stats (see utils.ping)"""
+        return utils.ping(
+            self.ssh,
+            dest_ip,
+            count=count,
+            interval=interval,
+            timeout=timeout,
+            size=size,
+            additional_ping_params=additional_ping_params,
+        )
 
-    def install_scapy(self, scapy_path, send_ra_path):
-        """Install scapy
-
-        Args:
-            scapy_path: path where scapy tar file is located on server
-            send_ra_path: path where sendra path is located on server
-        """
-        self.scapy_install_path = self.ssh.run('mktemp -d').stdout.rstrip()
-        self.log.info(f'Scapy install path: {self.scapy_install_path}')
-        self.ssh.send_file(scapy_path, self.scapy_install_path)
-        self.ssh.send_file(send_ra_path, self.scapy_install_path)
-
-        scapy = os.path.join(self.scapy_install_path,
-                             scapy_path.split('/')[-1])
-
-        self.ssh.run(f'tar -xvf {scapy} -C {self.scapy_install_path}')
-        self.ssh.run(f'cd {self.scapy_install_path}; {SCAPY_INSTALL_COMMAND}')
-
-    def cleanup_scapy(self):
-        """ Cleanup scapy """
-        if self.scapy_install_path:
-            cmd = f'rm -rf {self.scapy_install_path}'
-            self.log.info(f'Cleaning up scapy {cmd}')
-            output = self.ssh.run(cmd)
-            self.scapy_install_path = None
-
-    def send_ra(self,
-                iface,
-                mac=RA_MULTICAST_ADDR,
-                interval=1,
-                count=None,
-                lifetime=LIFETIME,
-                rtt=0):
-        """Invoke scapy and send RA to the device.
-
-        Args:
-          iface: string of the WiFi interface to use for sending packets.
-          mac: string HWAddr/MAC address to send the packets to.
-          interval: int Time to sleep between consecutive packets.
-          count: int Number of packets to be sent.
-          lifetime: int original RA's router lifetime in seconds.
-          rtt: retrans timer of the RA packet
-        """
-        scapy_command = os.path.join(self.scapy_install_path, RA_SCRIPT)
-        options = f' -m {mac} -i {interval} -c {count} -l {lifetime} -in {iface} -rtt {rtt}'
-        cmd = scapy_command + options
-        self.log.info(f'Scapy cmd: {cmd}')
-        self.ssh.run(cmd)
-
-    def get_icmp6intype134(self):
-        """Read the value of Icmp6InType134 and return integer.
-
-        Returns:
-            Integer value >0 if grep is successful; 0 otherwise.
-        """
-        ra_count_str = self.ssh.run(
-            f'grep Icmp6InType134 {PROC_NET_SNMP6} || true').stdout
-        if ra_count_str:
-            return int(ra_count_str.split()[1])
-
-    def ping(self,
-             dest_ip,
-             count=3,
-             interval=1000,
-             timeout=1000,
-             size=56,
-             additional_ping_params=None):
-        """Pings from AP to dest_ip, returns dict of ping stats (see utils.ping)
-        """
-        return utils.ping(self.ssh,
-                          dest_ip,
-                          count=count,
-                          interval=interval,
-                          timeout=timeout,
-                          size=size,
-                          additional_ping_params=additional_ping_params)
-
-    def can_ping(self,
-                 dest_ip,
-                 count=1,
-                 interval=1000,
-                 timeout=1000,
-                 size=56,
-                 additional_ping_params=None):
+    def can_ping(
+        self,
+        dest_ip: str,
+        count: int = 1,
+        interval: int = 1000,
+        timeout: int = 1000,
+        size: int = 56,
+        additional_ping_params: Optional[Any] = None,
+    ) -> bool:
         """Returns whether ap can ping dest_ip (see utils.can_ping)"""
-        return utils.can_ping(self.ssh,
-                              dest_ip,
-                              count=count,
-                              interval=interval,
-                              timeout=timeout,
-                              size=size,
-                              additional_ping_params=additional_ping_params)
+        return utils.can_ping(
+            self.ssh,
+            dest_ip,
+            count=count,
+            interval=interval,
+            timeout=timeout,
+            size=size,
+            additional_ping_params=additional_ping_params,
+        )
 
-    def hard_power_cycle(self,
-                         pdus,
-                         unreachable_timeout=30,
-                         ping_timeout=60,
-                         ssh_timeout=30,
-                         hostapd_configs=None):
+    def hard_power_cycle(
+        self,
+        pdus: List[PduDevice],
+        hostapd_configs: Optional[List[HostapdConfig]] = None,
+    ) -> None:
         """Kills, then restores power to AccessPoint, verifying it goes down and
         comes back online cleanly.
 
         Args:
-            pdus: list, PduDevices in the testbed
-            unreachable_timeout: int, time to wait for AccessPoint to become
-                unreachable
-            ping_timeout: int, time to wait for AccessPoint to responsd to pings
-            ssh_timeout: int, time to wait for AccessPoint to allow SSH
-            hostapd_configs (optional): list, containing hostapd settings. If
-                present, these networks will be spun up after the AP has
-                rebooted. This list can either contain HostapdConfig objects, or
-                    dictionaries with the start_ap params
+            pdus: PDUs in the testbed
+            hostapd_configs: Hostapd settings. If present, these networks will
+                be spun up after the AP has rebooted. This list can either
+                contain HostapdConfig objects, or dictionaries with the start_ap
+                params
                     (i.e  { 'hostapd_config': <HostapdConfig>,
                             'setup_bridge': <bool>,
                             'additional_parameters': <dict> } ).
@@ -818,132 +643,223 @@
             ConnectionError, if AccessPoint fails to go offline or come back.
         """
         if not self.device_pdu_config:
-            raise Error('No PduDevice provided in AccessPoint config.')
+            raise Error("No PduDevice provided in AccessPoint config.")
 
         if hostapd_configs is None:
             hostapd_configs = []
 
-        self.log.info(f'Power cycling')
-        ap_pdu, ap_pdu_port = pdu.get_pdu_port_for_device(
-            self.device_pdu_config, pdus)
+        self.log.info("Power cycling")
+        ap_pdu, ap_pdu_port = get_pdu_port_for_device(self.device_pdu_config, pdus)
 
-        self.log.info(f'Killing power')
+        self.log.info("Killing power")
         ap_pdu.off(str(ap_pdu_port))
 
-        self.log.info('Verifying AccessPoint is unreachable.')
-        timeout = time.time() + unreachable_timeout
-        while time.time() < timeout:
-            if not utils.can_ping(job, self.ssh_settings.hostname):
-                self.log.info('AccessPoint is unreachable as expected.')
-                break
-            else:
-                self.log.debug(
-                    'AccessPoint is still responding to pings. Retrying in 1 '
-                    'second.')
-                time.sleep(1)
-        else:
-            raise ConnectionError(
-                f'Failed to bring down AccessPoint ({self.ssh_settings.hostname})'
-            )
+        self.log.info("Verifying AccessPoint is unreachable.")
+        self.ssh_provider.wait_until_unreachable()
+        self.log.info("AccessPoint is unreachable as expected.")
+
         self._aps.clear()
 
-        self.log.info(f'Restoring power')
+        self.log.info("Restoring power")
         ap_pdu.on(str(ap_pdu_port))
 
-        self.log.info('Waiting for AccessPoint to respond to pings.')
-        timeout = time.time() + ping_timeout
-        while time.time() < timeout:
-            if utils.can_ping(job, self.ssh_settings.hostname):
-                self.log.info('AccessPoint responded to pings.')
-                break
-            else:
-                self.log.debug('AccessPoint is not responding to pings. '
-                               'Retrying in 1 second.')
-                time.sleep(1)
-        else:
-            raise ConnectionError(
-                f'Timed out waiting for AccessPoint ({self.ssh_settings.hostname}) '
-                'to respond to pings.')
-
-        self.log.info('Waiting for AccessPoint to allow ssh connection.')
-        timeout = time.time() + ssh_timeout
-        while time.time() < timeout:
-            try:
-                self.ssh.run('echo')
-            except connection.Error:
-                self.log.debug('AccessPoint is not allowing ssh connection. '
-                               'Retrying in 1 second.')
-                time.sleep(1)
-            else:
-                self.log.info('AccessPoint available via ssh.')
-                break
-        else:
-            raise ConnectionError(
-                f'Timed out waiting for AccessPoint ({self.ssh_settings.hostname}) '
-                'to allow ssh connection.')
+        self.log.info("Waiting for AccessPoint to become available via SSH.")
+        self.ssh_provider.wait_until_reachable()
+        self.log.info("AccessPoint responded to SSH.")
 
         # Allow 5 seconds for OS to finish getting set up
         time.sleep(5)
         self._initial_ap()
-        self.log.info('Power cycled successfully')
+        self.log.info("Power cycled successfully")
 
         for settings in hostapd_configs:
-            if type(settings) == hostapd_config.HostapdConfig:
+            if type(settings) == HostapdConfig:
                 config = settings
                 setup_bridge = False
                 additional_parameters = None
 
             elif type(settings) == dict:
-                config = settings['hostapd_config']
-                setup_bridge = settings.get('setup_bridge', False)
-                additional_parameters = settings.get('additional_parameters',
-                                                     None)
+                config = settings["hostapd_config"]
+                setup_bridge = settings.get("setup_bridge", False)
+                additional_parameters = settings.get("additional_parameters", None)
             else:
                 raise TypeError(
-                    'Items in hostapd_configs list must either be '
-                    'hostapd.HostapdConfig objects or dictionaries.')
+                    "Items in hostapd_configs list must either be "
+                    "HostapdConfig objects or dictionaries."
+                )
 
-            self.log.info(f'Restarting network {config.ssid}')
-            self.start_ap(config,
-                          setup_bridge=setup_bridge,
-                          additional_parameters=additional_parameters)
+            self.log.info(f"Restarting network {config.ssid}")
+            self.start_ap(
+                config,
+                setup_bridge=setup_bridge,
+                additional_parameters=additional_parameters,
+            )
 
-    def channel_switch(self, identifier, channel_num):
+    def channel_switch(self, identifier: str, channel_num: int) -> None:
         """Switch to a different channel on the given AP."""
         if identifier not in list(self._aps.keys()):
-            raise ValueError(f'Invalid identifier {identifier} given')
+            raise ValueError(f"Invalid identifier {identifier} given")
         instance = self._aps.get(identifier)
-        self.log.info(f'channel switch to channel {channel_num}')
+        self.log.info(f"channel switch to channel {channel_num}")
         instance.hostapd.channel_switch(channel_num)
 
-    def get_current_channel(self, identifier):
+    def get_current_channel(self, identifier: str) -> int:
         """Find the current channel on the given AP."""
         if identifier not in list(self._aps.keys()):
-            raise ValueError(f'Invalid identifier {identifier} given')
+            raise ValueError(f"Invalid identifier {identifier} given")
         instance = self._aps.get(identifier)
         return instance.hostapd.get_current_channel()
 
-    def get_stas(self, identifier) -> Set[str]:
+    def get_stas(self, identifier: str) -> Set[str]:
         """Return MAC addresses of all associated STAs on the given AP."""
         if identifier not in list(self._aps.keys()):
-            raise ValueError(f'Invalid identifier {identifier} given')
+            raise ValueError(f"Invalid identifier {identifier} given")
         instance = self._aps.get(identifier)
         return instance.hostapd.get_stas()
 
-    def get_sta_extended_capabilities(self, identifier,
-                                      sta_mac: str) -> ExtendedCapabilities:
+    def get_sta_extended_capabilities(
+        self, identifier: str, sta_mac: str
+    ) -> ExtendedCapabilities:
         """Get extended capabilities for the given STA, as seen by the AP."""
         if identifier not in list(self._aps.keys()):
-            raise ValueError(f'Invalid identifier {identifier} given')
+            raise ValueError(f"Invalid identifier {identifier} given")
         instance = self._aps.get(identifier)
         return instance.hostapd.get_sta_extended_capabilities(sta_mac)
 
     def send_bss_transition_management_req(
-            self, identifier, sta_mac: str,
-            request: BssTransitionManagementRequest):
+        self, identifier: str, sta_mac: str, request: BssTransitionManagementRequest
+    ) -> job.Result:
         """Send a BSS Transition Management request to an associated STA."""
         if identifier not in list(self._aps.keys()):
-            raise ValueError('Invalid identifier {identifier} given')
+            raise ValueError(f"Invalid identifier {identifier} given")
         instance = self._aps.get(identifier)
-        return instance.hostapd.send_bss_transition_management_req(
-            sta_mac, request)
+        return instance.hostapd.send_bss_transition_management_req(sta_mac, request)
+
+
+def setup_ap(
+    access_point: AccessPoint,
+    profile_name: str,
+    channel: int,
+    ssid: str,
+    mode: Optional[str] = None,
+    preamble: Optional[bool] = None,
+    beacon_interval: Optional[int] = None,
+    dtim_period: Optional[int] = None,
+    frag_threshold: Optional[int] = None,
+    rts_threshold: Optional[int] = None,
+    force_wmm: Optional[bool] = None,
+    hidden: Optional[bool] = False,
+    security: Optional[Security] = None,
+    pmf_support: Optional[int] = None,
+    additional_ap_parameters: Optional[Dict[str, Any]] = None,
+    password: Optional[str] = None,
+    n_capabilities: Optional[List[Any]] = None,
+    ac_capabilities: Optional[List[Any]] = None,
+    vht_bandwidth: Optional[int] = None,
+    wnm_features: FrozenSet[hostapd_constants.WnmFeature] = frozenset(),
+    setup_bridge: bool = False,
+    is_ipv6_enabled: bool = False,
+    is_nat_enabled: bool = True,
+):
+    """Creates a hostapd profile and runs it on an ap. This is a convenience
+    function that allows us to start an ap with a single function, without first
+    creating a hostapd config.
+
+    Args:
+        access_point: An antlion AccessPoint controller
+        profile_name: The profile name of one of the hostapd ap presets.
+        channel: What channel to set the AP to.
+        preamble: Whether to set short or long preamble
+        beacon_interval: The beacon interval
+        dtim_period: Length of dtim period
+        frag_threshold: Fragmentation threshold
+        rts_threshold: RTS threshold
+        force_wmm: Enable WMM or not
+        hidden: Advertise the SSID or not
+        security: What security to enable.
+        pmf_support: Whether pmf is not disabled, enabled, or required
+        additional_ap_parameters: Additional parameters to send the AP.
+        password: Password to connect to WLAN if necessary.
+        ssid: The SSID to broadcast from the AP.
+        wnm_features: WNM features to enable on the AP.
+        setup_bridge: Whether to bridge the LAN interface WLAN interface.
+            Only one WLAN interface can be bridged with the LAN interface
+            and none of the guest networks can be bridged.
+        is_ipv6_enabled: If True, start a IPv6 router advertisement daemon
+        is_nat_enabled: If True, start NAT on the AP to allow the DUT to be able
+            to access the internet if the WAN port is connected to the internet.
+
+    Returns:
+        An identifier for each ssid being started. These identifiers can be
+        used later by this controller to control the ap.
+
+    Raises:
+        Error: When the ap can't be brought up.
+    """
+    ap = create_ap_preset(
+        profile_name=profile_name,
+        iface_wlan_2g=access_point.wlan_2g,
+        iface_wlan_5g=access_point.wlan_5g,
+        channel=channel,
+        ssid=ssid,
+        mode=mode,
+        short_preamble=preamble,
+        beacon_interval=beacon_interval,
+        dtim_period=dtim_period,
+        frag_threshold=frag_threshold,
+        rts_threshold=rts_threshold,
+        force_wmm=force_wmm,
+        hidden=hidden,
+        bss_settings=[],
+        security=security,
+        pmf_support=pmf_support,
+        n_capabilities=n_capabilities,
+        ac_capabilities=ac_capabilities,
+        vht_bandwidth=vht_bandwidth,
+        wnm_features=wnm_features,
+    )
+    return access_point.start_ap(
+        hostapd_config=ap,
+        radvd_config=RadvdConfig() if is_ipv6_enabled else None,
+        setup_bridge=setup_bridge,
+        is_nat_enabled=is_nat_enabled,
+        additional_parameters=additional_ap_parameters,
+    )
+
+
+def create(configs: Any) -> List[AccessPoint]:
+    """Creates ap controllers from a json config.
+
+    Creates an ap controller from either a list, or a single
+    element. The element can either be just the hostname or a dictionary
+    containing the hostname and username of the ap to connect to over ssh.
+
+    Args:
+        configs: The json configs that represent this controller.
+
+    Returns:
+        A list of AccessPoint controllers, one per config entry.
+    """
+    return [AccessPoint(c) for c in configs]
+
+
+def destroy(aps: List[AccessPoint]) -> None:
+    """Destroys a list of access points.
+
+    Args:
+        aps: The list of access points to destroy.
+    """
+    for ap in aps:
+        ap.close()
+
+
+def get_info(aps: List[AccessPoint]) -> List[str]:
+    """Get information on a list of access points.
+
+    Args:
+        aps: A list of AccessPoints.
+
+    Returns:
+        A list of each AP's hostname.
+    """
+    return [ap.ssh_settings.hostname for ap in aps]
diff --git a/src/antlion/controllers/adb.py b/src/antlion/controllers/adb.py
index 9acfa1e..5c3848d 100644
--- a/src/antlion/controllers/adb.py
+++ b/src/antlion/controllers/adb.py
@@ -26,19 +26,19 @@
 DEFAULT_ADB_TIMEOUT = 60
 DEFAULT_ADB_PULL_TIMEOUT = 180
 
-ADB_REGEX = re.compile('adb:')
+ADB_REGEX = re.compile("adb:")
 # Uses a regex to be backwards compatible with previous versions of ADB
 # (N and above add the serial to the error msg).
-DEVICE_NOT_FOUND_REGEX = re.compile('error: device (?:\'.*?\' )?not found')
-DEVICE_OFFLINE_REGEX = re.compile('error: device offline')
+DEVICE_NOT_FOUND_REGEX = re.compile("error: device (?:'.*?' )?not found")
+DEVICE_OFFLINE_REGEX = re.compile("error: device offline")
 # Raised when adb forward commands fail to forward a port.
-CANNOT_BIND_LISTENER_REGEX = re.compile('error: cannot bind listener:')
+CANNOT_BIND_LISTENER_REGEX = re.compile("error: cannot bind listener:")
 # Expected output is "Android Debug Bridge version 1.0.XX
-ADB_VERSION_REGEX = re.compile('Android Debug Bridge version 1.0.(\d+)')
-GREP_REGEX = re.compile('grep(\s+)')
+ADB_VERSION_REGEX = re.compile(r"Android Debug Bridge version 1.0.(\d+)")
+GREP_REGEX = re.compile(r"grep(\s+)")
 
-ROOT_USER_ID = '0'
-SHELL_USER_ID = '2000'
+ROOT_USER_ID = "0"
+SHELL_USER_ID = "2000"
 
 
 def parsing_parcel_output(output):
@@ -51,8 +51,8 @@
         0x00000020: 00350034 00330035 00320038 00310033 '4.5.5.3.8.2.3.1.'
         0x00000030: 00000000                            '....            ')
     """
-    output = ''.join(re.findall(r"'(.*)'", output))
-    return re.sub(r'[.\s]', '', output)
+    output = "".join(re.findall(r"'(.*)'", output))
+    return re.sub(r"[.\s]", "", output)
 
 
 class AdbProxy(object):
@@ -75,7 +75,7 @@
         """
         self.serial = serial
         self._server_local_port = None
-        adb_path = shutil.which('adb')
+        adb_path = shutil.which("adb")
         adb_cmd = [shlex.quote(adb_path)]
         if serial:
             adb_cmd.append("-s %s" % serial)
@@ -87,8 +87,10 @@
             temp_dir = ssh_connection.run("mktemp -d").stdout.strip()
             ssh_connection.send_file(adb_path, temp_dir)
             # Start up a new adb server running as root from the copied binary.
-            remote_adb_cmd = "%s/adb %s root" % (temp_dir, "-s %s" % serial
-                                                 if serial else "")
+            remote_adb_cmd = "%s/adb %s root" % (
+                temp_dir,
+                "-s %s" % serial if serial else "",
+            )
             ssh_connection.run(remote_adb_cmd)
             # Proxy a local port to the adb server port
             local_port = ssh_connection.create_ssh_tunnel(5037)
@@ -101,7 +103,7 @@
 
     def get_user_id(self):
         """Returns the adb user. Either 2000 (shell) or 0 (root)."""
-        return self.shell('id -u')
+        return self.shell("id -u")
 
     def is_root(self, user_id=None):
         """Checks if the user is root.
@@ -156,13 +158,19 @@
             AdbCommandError for errors from commands executed through ADB.
         """
         if isinstance(cmd, list):
-            cmd = ' '.join(cmd)
+            cmd = " ".join(cmd)
         result = job.run(cmd, ignore_status=True, timeout=timeout)
         ret, out, err = result.exit_status, result.stdout, result.stderr
 
-        if any(pattern.match(err) for pattern in
-               [ADB_REGEX, DEVICE_OFFLINE_REGEX, DEVICE_NOT_FOUND_REGEX,
-                CANNOT_BIND_LISTENER_REGEX]):
+        if any(
+            pattern.match(err)
+            for pattern in [
+                ADB_REGEX,
+                DEVICE_OFFLINE_REGEX,
+                DEVICE_NOT_FOUND_REGEX,
+                CANNOT_BIND_LISTENER_REGEX,
+            ]
+        ):
             raise AdbError(cmd=cmd, stdout=out, stderr=err, ret_code=ret)
         if "Result: Parcel" in out:
             return parsing_parcel_output(out)
@@ -173,8 +181,7 @@
         return out
 
     def _exec_adb_cmd(self, name, arg_str, **kwargs):
-        return self._exec_cmd(' '.join((self.adb_str, name, arg_str)),
-                              **kwargs)
+        return self._exec_cmd(" ".join((self.adb_str, name, arg_str)), **kwargs)
 
     def _exec_cmd_nb(self, cmd, **kwargs):
         """Executes adb commands in a new shell, non blocking.
@@ -186,8 +193,7 @@
         return job.run_async(cmd, **kwargs)
 
     def _exec_adb_cmd_nb(self, name, arg_str, **kwargs):
-        return self._exec_cmd_nb(' '.join((self.adb_str, name, arg_str)),
-                                 **kwargs)
+        return self._exec_cmd_nb(" ".join((self.adb_str, name, arg_str)), **kwargs)
 
     def tcp_forward(self, host_port, device_port):
         """Starts tcp forwarding from localhost to this android device.
@@ -206,9 +212,11 @@
             #     device port
             remote_port = self._ssh_connection.find_free_port()
             host_port = self._ssh_connection.create_ssh_tunnel(
-                remote_port, local_port=host_port)
-        output = self.forward("tcp:%d tcp:%d" % (host_port, device_port),
-                              ignore_status=True)
+                remote_port, local_port=host_port
+            )
+        output = self.forward(
+            "tcp:%d tcp:%d" % (host_port, device_port), ignore_status=True
+        )
         # If hinted_port is 0, the output will be the selected port.
         # Otherwise, there will be no output upon successfully
         # forwarding the hinted port.
@@ -229,8 +237,9 @@
         if self._ssh_connection:
             remote_port = self._ssh_connection.close_ssh_tunnel(host_port)
             if remote_port is None:
-                logging.warning("Cannot close unknown forwarded tcp port: %d",
-                                host_port)
+                logging.warning(
+                    "Cannot close unknown forwarded tcp port: %d", host_port
+                )
                 return
             # The actual port we need to disable via adb is on the remote host.
             host_port = remote_port
@@ -254,20 +263,18 @@
     # command.
     def shell(self, command, ignore_status=False, timeout=DEFAULT_ADB_TIMEOUT):
         return self._exec_adb_cmd(
-            'shell',
-            shlex.quote(command),
-            ignore_status=ignore_status,
-            timeout=timeout)
+            "shell", shlex.quote(command), ignore_status=ignore_status, timeout=timeout
+        )
 
     def shell_nb(self, command):
-        return self._exec_adb_cmd_nb('shell', shlex.quote(command))
+        return self._exec_adb_cmd_nb("shell", shlex.quote(command))
 
     def __getattr__(self, name):
         def adb_call(*args, **kwargs):
-            clean_name = name.replace('_', '-')
-            if clean_name in ['pull', 'push', 'remount'] and 'timeout' not in kwargs:
-                kwargs['timeout'] = DEFAULT_ADB_PULL_TIMEOUT
-            arg_str = ' '.join(str(elem) for elem in args)
+            clean_name = name.replace("_", "-")
+            if clean_name in ["pull", "push", "remount"] and "timeout" not in kwargs:
+                kwargs["timeout"] = DEFAULT_ADB_PULL_TIMEOUT
+            arg_str = " ".join(str(elem) for elem in args)
             return self._exec_adb_cmd(clean_name, arg_str, **kwargs)
 
         return adb_call
@@ -282,7 +289,9 @@
         match = re.search(ADB_VERSION_REGEX, version_output)
 
         if not match:
-            logging.error('Unable to capture ADB version from adb version '
-                          'output: %s' % version_output)
-            raise AdbError('adb version', version_output, '', '')
+            logging.error(
+                "Unable to capture ADB version from adb version "
+                "output: %s" % version_output
+            )
+            raise AdbError("adb version", version_output, "", "")
         return int(match.group(1))
diff --git a/src/antlion/controllers/adb_lib/error.py b/src/antlion/controllers/adb_lib/error.py
index 6fcae7d..9599214 100644
--- a/src/antlion/controllers/adb_lib/error.py
+++ b/src/antlion/controllers/adb_lib/error.py
@@ -28,8 +28,12 @@
         self.ret_code = ret_code
 
     def __str__(self):
-        return ("Error executing adb cmd '%s'. ret: %d, stdout: %s, stderr: %s"
-                ) % (self.cmd, self.ret_code, self.stdout, self.stderr)
+        return ("Error executing adb cmd '%s'. ret: %d, stdout: %s, stderr: %s") % (
+            self.cmd,
+            self.ret_code,
+            self.stdout,
+            self.stderr,
+        )
 
 
 class AdbCommandError(AdbError):
diff --git a/src/antlion/controllers/amarisoft_lib/OWNERS b/src/antlion/controllers/amarisoft_lib/OWNERS
deleted file mode 100644
index edee4ef..0000000
--- a/src/antlion/controllers/amarisoft_lib/OWNERS
+++ /dev/null
@@ -1,4 +0,0 @@
-markusliu@google.com
-mollychang@google.com
-angelayu@google.com
-zoeyliu@google.com
diff --git a/src/antlion/controllers/amarisoft_lib/amarisoft_client.py b/src/antlion/controllers/amarisoft_lib/amarisoft_client.py
deleted file mode 100644
index 5386f81..0000000
--- a/src/antlion/controllers/amarisoft_lib/amarisoft_client.py
+++ /dev/null
@@ -1,221 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import asyncio
-import json
-import logging
-from typing import Any, Mapping, Optional, Tuple
-
-from antlion.controllers.amarisoft_lib import ssh_utils
-import immutabledict
-import websockets
-
-_CONFIG_DIR_MAPPING = immutabledict.immutabledict({
-    'enb': '/config/enb.cfg',
-    'mme': '/config/mme.cfg',
-    'ims': '/config/ims.cfg',
-    'mbms': '/config/mbmsgw.cfg',
-    'ots': '/config/ots.cfg'
-})
-
-
-class MessageFailureError(Exception):
-  """Raises an error when the message execution fail."""
-
-
-class AmariSoftClient(ssh_utils.RemoteClient):
-  """The SSH client class interacts with Amarisoft.
-
-    A simulator used to simulate the base station can output different signals
-    according to the network configuration settings.
-    For example: T Mobile NSA LTE band 66 + NR band 71.
-  """
-
-  async def _send_message_to_callbox(self, uri: str,
-                                     msg: str) -> Tuple[str, str]:
-    """Implements async function for send message to the callbox.
-
-    Args:
-      uri: The uri of specific websocket interface.
-      msg: The message to be send to callbox.
-
-    Returns:
-      The response from callbox.
-    """
-    async with websockets.connect(
-        uri, extra_headers={'origin': 'Test'}) as websocket:
-      await websocket.send(msg)
-      head = await websocket.recv()
-      body = await websocket.recv()
-    return head, body
-
-  def send_message(self, port: str, msg: str) -> Tuple[str, str]:
-    """Sends a message to the callbox.
-
-    Args:
-      port: The port of specific websocket interface.
-      msg: The message to be send to callbox.
-
-    Returns:
-      The response from callbox.
-    """
-    return asyncio.get_event_loop().run_until_complete(
-        self._send_message_to_callbox(f'ws://{self.host}:{port}/', msg))
-
-  def verify_response(self, func: str, head: str,
-                      body: str) -> Tuple[Mapping[str, Any], Mapping[str, Any]]:
-    """Makes sure there are no error messages in Amarisoft's response.
-
-    If a message produces an error, response will have an error string field
-    representing the error.
-    For example:
-      {
-        "message": "ready",
-        "message_id": <message id>,
-        "error": <error message>,
-        "type": "ENB",
-        "name: <name>,
-      }
-
-    Args:
-      func: The message send to Amarisoft.
-      head: Responsed message head.
-      body: Responsed message body.
-
-    Returns:
-      Standard output of the shell command.
-
-    Raises:
-       MessageFailureError: Raised when an error occurs in the response message.
-    """
-    loaded_head = json.loads(head)
-    loaded_body = json.loads(body)
-
-    if loaded_head.get('message') != 'ready':
-      raise MessageFailureError(
-          f'Fail to get response from callbox, message: {loaded_head["error"]}')
-    if 'error' in loaded_body:
-      raise MessageFailureError(
-          f'Fail to excute {func} with error message: {loaded_body["error"]}')
-    if loaded_body.get('message') != func:
-      raise MessageFailureError(
-          f'The message sent was {loaded_body["message"]} instead of {func}.')
-    return loaded_head, loaded_body
-
-  def lte_service_stop(self) -> None:
-    """Stops to output signal."""
-    self.run_cmd('systemctl stop lte')
-
-  def lte_service_start(self):
-    """Starts to output signal."""
-    self.run_cmd('systemctl start lte')
-
-  def lte_service_restart(self):
-    """Restarts to output signal."""
-    self.run_cmd('systemctl restart lte')
-
-  def lte_service_enable(self):
-    """lte service remains enable until next reboot."""
-    self.run_cmd('systemctl enable lte')
-
-  def lte_service_disable(self):
-    """lte service remains disable until next reboot."""
-    self.run_cmd('systemctl disable lte')
-
-  def lte_service_is_active(self) -> bool:
-    """Checks lte service is active or not.
-
-    Returns:
-      True if service active, False otherwise.
-    """
-    return not any('inactive' in line
-                   for line in self.run_cmd('systemctl is-active lte'))
-
-  def set_config_dir(self, cfg_type: str, path: str) -> None:
-    """Sets the path of target configuration file.
-
-    Args:
-      cfg_type: The type of target configuration. (e.g. mme, enb ...etc.)
-      path: The path of target configuration. (e.g.
-        /root/lteenb-linux-2020-12-14)
-    """
-    path_old = self.get_config_dir(cfg_type)
-    if path != path_old:
-      logging.info('set new path %s (was %s)', path, path_old)
-      self.run_cmd(f'ln -sfn {path} /root/{cfg_type}')
-    else:
-      logging.info('path %s does not change.', path_old)
-
-  def get_config_dir(self, cfg_type: str) -> Optional[str]:
-    """Gets the path of target configuration.
-
-    Args:
-      cfg_type: Target configuration type. (e.g. mme, enb...etc.)
-
-    Returns:
-      The path of configuration.
-    """
-    result = self.run_cmd(f'readlink /root/{cfg_type}')
-    if result:
-      path = result[0].strip()
-    else:
-      logging.warning('%s path not found.', cfg_type)
-      return None
-    return path
-
-  def set_config_file(self, cfg_type: str, cfg_file: str) -> None:
-    """Sets the configuration to be executed.
-
-    Args:
-      cfg_type: The type of target configuration. (e.g. mme, enb...etc.)
-      cfg_file: The configuration to be executed. (e.g.
-        /root/lteenb-linux-2020-12-14/config/gnb.cfg )
-
-    Raises:
-      FileNotFoundError: Raised when a file or directory is requested but
-      doesn’t exist.
-    """
-    cfg_link = self.get_config_dir(cfg_type) + _CONFIG_DIR_MAPPING[cfg_type]
-    if not self.is_file_exist(cfg_file):
-      raise FileNotFoundError("The command file doesn't exist")
-    self.run_cmd(f'ln -sfn {cfg_file} {cfg_link}')
-
-  def get_config_file(self, cfg_type: str) -> Optional[str]:
-    """Gets the current configuration of specific configuration type.
-
-    Args:
-      cfg_type: The type of target configuration. (e.g. mme, enb...etc.)
-
-    Returns:
-      The current configuration with absolute path.
-    """
-    cfg_path = self.get_config_dir(cfg_type) + _CONFIG_DIR_MAPPING[cfg_type]
-    if cfg_path:
-      result = self.run_cmd(f'readlink {cfg_path}')
-      if result:
-        return result[0].strip()
-
-  def get_all_config_dir(self) -> Mapping[str, str]:
-    """Gets all configuration directions.
-
-    Returns:
-      All configuration directions.
-    """
-    config_dir = {}
-    for cfg_type in ('ots', 'enb', 'mme', 'mbms'):
-      config_dir[cfg_type] = self.get_config_dir(cfg_type)
-      logging.debug('get path of %s: %s', cfg_type, config_dir[cfg_type])
-    return config_dir
diff --git a/src/antlion/controllers/amarisoft_lib/amarisoft_constants.py b/src/antlion/controllers/amarisoft_lib/amarisoft_constants.py
deleted file mode 100644
index c62bf2a..0000000
--- a/src/antlion/controllers/amarisoft_lib/amarisoft_constants.py
+++ /dev/null
@@ -1,14 +0,0 @@
-"""Constants for test."""

-

-

-# ports of lte service websocket interface

-class PortNumber:

-  URI_MME = '9000'

-  URI_ENB = '9001'

-  URI_UE = '9002'

-  URI_IMS = '9003'

-  URI_MBMS = '9004'

-  URI_PROBE = '9005'

-  URI_LICENSE = '9006'

-  URI_MON = '9007'

-  URI_VIEW = '9008'

diff --git a/src/antlion/controllers/amarisoft_lib/config_utils.py b/src/antlion/controllers/amarisoft_lib/config_utils.py
deleted file mode 100644
index 8d3b603..0000000
--- a/src/antlion/controllers/amarisoft_lib/config_utils.py
+++ /dev/null
@@ -1,201 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import enum
-import os
-import immutabledict
-
-from antlion.controllers.amarisoft_lib import amarisoft_client
-
-TEMPLATE_PATH = os.path.dirname(os.path.abspath(__file__)) + '/config_templates'
-TEMPLATE_PATH_ENB = f'{TEMPLATE_PATH}/enb/'
-TEMPLATE_PATH_MME = f'{TEMPLATE_PATH}/mme/'
-
-_CLIENT_CONFIG_DIR_MAPPING = immutabledict.immutabledict({
-    'enb': '/config/mhtest_enb.cfg',
-    'mme': '/config/mhtest_mme.cfg',
-})
-
-
-class EnbCfg():
-  """MME configuration templates."""
-  ENB_GENERIC = 'enb-single-generic.cfg'
-  GNB_NSA_GENERIC = 'gnb-nsa-lte-ho-generic.cfg'
-  GNB_SA_GENERIC = 'gnb-sa-lte-ho-generic.cfg'
-
-
-class MmeCfg():
-  """MME configuration templates."""
-  MME_GENERIC = 'mme-generic.cfg'
-
-
-class SpecTech(enum.Enum):
-  """Spectrum usage techniques."""
-  FDD = 0
-  TDD = 1
-
-
-class ConfigUtils():
-  """Utilities for set Amarisoft configs.
-
-  Attributes:
-    remote: An amarisoft client.
-  """
-
-  def __init__(self, remote: amarisoft_client.AmariSoftClient):
-    self.remote = remote
-
-  def upload_enb_template(self, cfg: str) -> bool:
-    """Loads ENB configuration.
-
-    Args:
-      cfg: The ENB configuration to be loaded.
-
-    Returns:
-      True if the ENB configuration was loaded successfully, False otherwise.
-    """
-    cfg_template = TEMPLATE_PATH_ENB + cfg
-    if not os.path.isfile(cfg_template):
-      return False
-    cfg_path = self.remote.get_config_dir(
-        'enb') + _CLIENT_CONFIG_DIR_MAPPING['enb']
-    self.remote.run_cmd('rm -f ' + cfg_path)
-    self.remote.sftp_upload(cfg_template, cfg_path)
-    self.remote.set_config_file('enb', cfg_path)
-    if not self.remote.is_file_exist(cfg_path):
-      return False
-    return True
-
-  def upload_mme_template(self, cfg: str) -> bool:
-    """Loads MME configuration.
-
-    Args:
-      cfg: The MME configuration to be loaded.
-
-    Returns:
-      True if the ENB configuration was loaded successfully, False otherwise.
-    """
-    cfg_template = TEMPLATE_PATH_MME + cfg
-    if not os.path.isfile(cfg_template):
-      return False
-    cfg_path = self.remote.get_config_dir(
-        'mme') + _CLIENT_CONFIG_DIR_MAPPING['mme']
-    self.remote.run_cmd('rm -f ' + cfg_path)
-    self.remote.sftp_upload(cfg_template, cfg_path)
-    self.remote.set_config_file('mme', cfg_path)
-    if not self.remote.is_file_exist(cfg_path):
-      return False
-    return True
-
-  def enb_set_plmn(self, plmn: str) -> bool:
-    """Sets the PLMN in ENB configuration.
-
-    Args:
-      plmn: The PLMN to be set. ex: 311480
-
-    Returns:
-      True if set PLMN successfully, False otherwise.
-    """
-    cfg_path = self.remote.get_config_dir(
-        'enb') + _CLIENT_CONFIG_DIR_MAPPING['enb']
-    if not self.remote.is_file_exist(cfg_path):
-      return False
-    string_from = '#define PLMN \"00101\"'
-    string_to = f'#define PLMN \"{plmn}\"'
-    self.remote.run_cmd(f'sed -i \'s/\\r//g\' {cfg_path}')
-    self.remote.run_cmd(
-        f'sed -i \':a;N;$!ba;s/{string_from}/{string_to}/g\' {cfg_path}')
-    return True
-
-  def mme_set_plmn(self, plmn: str) -> bool:
-    """Sets the PLMN in MME configuration.
-
-    Args:
-      plmn: The PLMN to be set. ex:'311480'
-
-    Returns:
-      True if set PLMN successfully, False otherwise.
-    """
-    cfg_path = self.remote.get_config_dir(
-        'mme') + _CLIENT_CONFIG_DIR_MAPPING['mme']
-    if not self.remote.is_file_exist(cfg_path):
-      return False
-    string_from = '#define PLMN \"00101\"'
-    string_to = f'#define PLMN \"{plmn}\"'
-    self.remote.run_cmd(f'sed -i \'s/\\r//g\' {cfg_path}')
-    self.remote.run_cmd(
-        f'sed -i \':a;N;$!ba;s/{string_from}/{string_to}/g\' {cfg_path}')
-    return True
-
-  def enb_set_fdd_arfcn(self, arfcn: int) -> bool:
-    """Sets the FDD ARFCN in ENB configuration.
-
-    Args:
-      arfcn: The arfcn to be set. ex: 1400
-
-    Returns:
-      True if set FDD ARFCN successfully, False otherwise.
-    """
-    cfg_path = self.remote.get_config_dir(
-        'enb') + _CLIENT_CONFIG_DIR_MAPPING['enb']
-    if not self.remote.is_file_exist(cfg_path):
-      return False
-    string_from = '#define FDD_CELL_earfcn 1400'
-    string_to = f'#define FDD_CELL_earfcn {arfcn}'
-    self.remote.run_cmd(f'sed -i \'s/\\r//g\' {cfg_path}')
-    self.remote.run_cmd(
-        f'sed -i \':a;N;$!ba;s/{string_from}/{string_to}/g\' {cfg_path}')
-    return True
-
-  def enb_set_tdd_arfcn(self, arfcn: int) -> bool:
-    """Sets the TDD ARFCN in ENB configuration.
-
-    Args:
-      arfcn: The arfcn to be set. ex: 1400
-
-    Returns:
-      True if set FDD ARFCN successfully, False otherwise.
-    """
-    cfg_path = self.remote.get_config_dir(
-        'enb') + _CLIENT_CONFIG_DIR_MAPPING['enb']
-    if not self.remote.is_file_exist(cfg_path):
-      return False
-    string_from = '#define TDD_CELL_earfcn 40620'
-    string_to = f'#define TDD_CELL_earfcn {arfcn}'
-    self.remote.run_cmd(f'sed -i \'s/\\r//g\' {cfg_path}')
-    self.remote.run_cmd(
-        f'sed -i \':a;N;$!ba;s/{string_from}/{string_to}/g\' {cfg_path}')
-    return True
-
-  def enb_set_spectrum_tech(self, tech: int) -> bool:
-    """Sets the spectrum usage techniques in ENB configuration.
-
-    Args:
-      tech: the spectrum usage techniques. ex: SpecTech.FDD.name
-
-    Returns:
-      True if set spectrum usage techniques successfully, False otherwise.
-    """
-    cfg_path = self.remote.get_config_dir(
-        'enb') + _CLIENT_CONFIG_DIR_MAPPING['enb']
-    if not self.remote.is_file_exist(cfg_path):
-      return False
-    string_from = '#define TDD 0'
-    string_to = f'#define TDD {tech}'
-    self.remote.run_cmd(f'sed -i \'s/\\r//g\' {cfg_path}')
-    self.remote.run_cmd(
-        f'sed -i \':a;N;$!ba;s/{string_from}/{string_to}/g\' {cfg_path}')
-    return True
diff --git a/src/antlion/controllers/amarisoft_lib/ims.py b/src/antlion/controllers/amarisoft_lib/ims.py
deleted file mode 100644
index ee575c4..0000000
--- a/src/antlion/controllers/amarisoft_lib/ims.py
+++ /dev/null
@@ -1,156 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import json
-import logging
-from typing import Any, Mapping, Optional, Union
-
-from antlion.controllers.amarisoft_lib import amarisoft_client
-from antlion.controllers.amarisoft_lib import amarisoft_constants as const
-
-
-class ImsFunctions():
-  """Utilities for Amarisoft's IMS Remote API.
-
-  Attributes:
-    remote: An amarisoft client.
-  """
-
-  def __init__(self, remote: amarisoft_client.AmariSoftClient):
-    self.remote = remote
-
-  def make_call(self,
-              impi: str,
-              impu: str,
-              contact: str,
-              sip_file: str = 'mt_call_qos.sdp',
-              caller: str = 'Amarisoft',
-              duration: int = 30) -> None:
-    """Performs MT call from callbox to test device.
-
-    Args:
-      impi: IMPI (IP Multimedia Private identity) of user to call.
-      impu: IMPU (IP Multimedia Public identity) of user to call.
-      contact: Contact SIP uri of user to call.
-      sip_file: Define file to use as sdp.
-      caller: The number/ID is displayed as the caller.
-      duration: If set, call duration in seconds (The server will close the
-        dialog).
-    """
-    msg = {}
-    msg['message'] = 'mt_call'
-    msg['impi'] = impi
-    msg['impu'] = impu
-    msg['contact'] = contact
-    msg['sip_file'] = sip_file
-    msg['caller'] = caller
-    msg['duration'] = duration
-    dump_msg = json.dumps(msg)
-    logging.debug('mt_call dump msg = %s', dump_msg)
-    head, body = self.remote.send_message(const.PortNumber.URI_IMS, dump_msg)
-    self.remote.verify_response('mt_call', head, body)
-
-  def send_sms(self,
-               text: str,
-               impi: str,
-               sender: Optional[str] = 'Amarisoft') -> None:
-    """Sends SMS to assigned device which connect to Amarisoft.
-
-    Args:
-      text: SMS text to send.
-      impi: IMPI (IP Multimedia Private identity) of user.
-      sender: Sets SMS sender.
-    """
-    msg = {}
-    msg['message'] = 'sms'
-    msg['text'] = text
-    msg['impi'] = impi
-    msg['sender'] = sender
-    dump_msg = json.dumps(msg)
-    logging.debug('send_sms dump msg = %s', dump_msg)
-    head, body = self.remote.send_message(const.PortNumber.URI_IMS, dump_msg)
-    self.remote.verify_response('sms', head, body)
-
-  def send_mms(self, filename: str, sender: str, receiver: str) -> None:
-    """Sends MMS to assigned device which connect to Amarisoft.
-
-    Args:
-      filename: File name with absolute path to send. Extensions jpg, jpeg, png,
-        gif and txt are supported.
-      sender: IMPI (IP Multimedia Private identity) of user.
-      receiver: IMPU (IP Multimedia Public identity) of user.
-    """
-    msg = {}
-    msg['message'] = 'mms'
-    msg['filename'] = filename
-    msg['sender'] = sender
-    msg['receiver'] = receiver
-    dump_msg = json.dumps(msg)
-    logging.debug('send_mms dump msg = %s', dump_msg)
-    head, body = self.remote.send_message(const.PortNumber.URI_IMS, dump_msg)
-    self.remote.verify_response('mms', head, body)
-
-  def users_get(self, registered_only: bool = True) -> Mapping[str, Any]:
-    """Gets users state.
-
-    Args:
-      registered_only: If set, only registered user will be dumped.
-
-    Returns:
-      The user information.
-    """
-    msg = {}
-    msg['message'] = 'users_get'
-    msg['registered_only'] = registered_only
-    dump_msg = json.dumps(msg)
-    logging.debug('users_get dump msg = %s', dump_msg)
-    head, body = self.remote.send_message(const.PortNumber.URI_IMS, dump_msg)
-    _, loaded_body = self.remote.verify_response('users_get', head, body)
-    return loaded_body
-
-  def get_impu(self, impi) -> Union[str, None]:
-    """Obtains the IMPU of the target user according to IMPI.
-
-    Args:
-      impi: IMPI (IP Multimedia Private identity) of user to call. ex:
-        "310260123456785@ims.mnc260.mcc310.3gppnetwork.org"
-
-    Returns:
-      The IMPU of target user.
-    """
-    body = self.users_get(True)
-    for index in range(len(body['users'])):
-      if impi in body['users'][index]['impi']:
-        impu = body['users'][index]['bindings'][0]['impu'][1]
-        return impu
-    return None
-
-  def get_uri(self, impi) -> Union[str, None]:
-    """Obtains the URI of the target user according to IMPI.
-
-    Args:
-      impi: IMPI (IP Multimedia Private identity) of user to call. ex:
-        "310260123456785@ims.mnc260.mcc310.3gppnetwork.org"
-
-    Returns:
-      The URI of target user.
-    """
-    body = self.users_get(True)
-    for index in range(len(body['users'])):
-      if impi in body['users'][index]['impi']:
-        uri = body['users'][index]['bindings'][0]['uri']
-        return uri
-    return None
diff --git a/src/antlion/controllers/amarisoft_lib/mme.py b/src/antlion/controllers/amarisoft_lib/mme.py
deleted file mode 100644
index 6f7ee42..0000000
--- a/src/antlion/controllers/amarisoft_lib/mme.py
+++ /dev/null
@@ -1,80 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import json
-import logging
-
-from antlion.controllers.amarisoft_lib import amarisoft_constants as const
-from antlion.controllers.amarisoft_lib import amarisoft_client
-
-
-class MmeFunctions():
-  """Utilities for Amarisoft's MME Remote API.
-
-  Attributes:
-    remote: An amarisoft client.
-  """
-
-  def __init__(self, remote: amarisoft_client.AmariSoftClient):
-    self.remote = remote
-
-  def pws_write(self, local_id: str, n50: bool = False):
-    """Broadcasts emergency alert message.
-
-    Args:
-      local_id: ID of the message as defined by local identifier in MME
-        configuration file.
-      n50: If True, N50 interface is used, otherwise SBC interface is used. (see TS 23.041)
-    """
-    msg = {}
-    msg['message'] = 'pws_write'
-    msg['local_id'] = local_id
-    msg['nf'] = n50
-    dump_msg = json.dumps(msg)
-    logging.debug('pws_write dump msg = %s', dump_msg)
-    head, body = self.remote.send_message(const.PortNumber.URI_MME, dump_msg)
-    self.remote.verify_response('pws_write', head, body)
-
-  def pws_kill(self, local_id: str, n50: bool = False):
-    """Stops broadcasts emergency alert message.
-
-    Args:
-      local_id: ID of the message as defined by local identifier in MME
-        configuration file.
-      n50: If True, N50 interface is used, otherwise SBC interface is used. (see TS 23.041)
-    """
-    msg = {}
-    msg['message'] = 'pws_kill'
-    msg['local_id'] = local_id
-    msg['nf'] = n50
-    dump_msg = json.dumps(msg)
-    logging.debug('pws_kill dump msg = %s', dump_msg)
-    head, body = self.remote.send_message(const.PortNumber.URI_MME, dump_msg)
-    self.remote.verify_response('pws_kill', head, body)
-
-  def ue_del(self, imsi: str):
-    """Remove UE from the UE database and force disconnect if necessary.
-
-    Args:
-      imsi: IMSI of the UE to delete.
-    """
-    msg = {}
-    msg['message'] = 'ue_del'
-    msg['imsi'] = imsi
-    dump_msg = json.dumps(msg)
-    logging.debug('ue_del dump msg = %s', dump_msg)
-    head, body = self.remote.send_message(const.PortNumber.URI_MME, dump_msg)
-    self.remote.verify_response('ue_del', head, body)
diff --git a/src/antlion/controllers/amarisoft_lib/ssh_utils.py b/src/antlion/controllers/amarisoft_lib/ssh_utils.py
deleted file mode 100644
index fccc1d7..0000000
--- a/src/antlion/controllers/amarisoft_lib/ssh_utils.py
+++ /dev/null
@@ -1,195 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-from typing import Sequence
-
-import paramiko
-
-COMMAND_RETRY_TIMES = 3
-
-
-class RunCommandError(Exception):
-  """Raises an error when run command fail."""
-
-
-class NotConnectedError(Exception):
-  """Raises an error when run command without SSH connect."""
-
-
-class RemoteClient:
-  """The SSH client class interacts with the test machine.
-
-  Attributes:
-    host: A string representing the IP address of amarisoft.
-    port: A string representing the default port of SSH.
-    username: A string representing the username of amarisoft.
-    password: A string representing the password of amarisoft.
-    ssh: A SSH client.
-    sftp: A SFTP client.
-  """
-
-  def __init__(self,
-               host: str,
-               username: str,
-               password: str,
-               port: str = '22') -> None:
-    self.host = host
-    self.port = port
-    self.username = username
-    self.password = password
-    self.ssh = paramiko.SSHClient()
-    self.sftp = None
-
-  def ssh_is_connected(self) -> bool:
-    """Checks SSH connect or not.
-
-    Returns:
-      True if SSH is connected, False otherwise.
-    """
-    return self.ssh and self.ssh.get_transport().is_active()
-
-  def ssh_close(self) -> bool:
-    """Closes the SSH connection.
-
-    Returns:
-      True if ssh session closed, False otherwise.
-    """
-    for _ in range(COMMAND_RETRY_TIMES):
-      if self.ssh_is_connected():
-        self.ssh.close()
-      else:
-        return True
-    return False
-
-  def connect(self) -> bool:
-    """Creats SSH connection.
-
-    Returns:
-      True if success, False otherwise.
-    """
-    for _ in range(COMMAND_RETRY_TIMES):
-      try:
-        self.ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
-        self.ssh.connect(self.host, self.port, self.username, self.password)
-        self.ssh.get_transport().set_keepalive(1)
-        self.sftp = paramiko.SFTPClient.from_transport(self.ssh.get_transport())
-        return True
-      except Exception:  # pylint: disable=broad-except
-        self.ssh_close()
-    return False
-
-  def run_cmd(self, cmd: str) -> Sequence[str]:
-    """Runs shell command.
-
-    Args:
-      cmd: Command to be executed.
-
-    Returns:
-      Standard output of the shell command.
-
-    Raises:
-       RunCommandError: Raise error when command failed.
-       NotConnectedError: Raised when run command without SSH connect.
-    """
-    if not self.ssh_is_connected():
-      raise NotConnectedError('ssh remote has not been established')
-
-    logging.debug('ssh remote -> %s', cmd)
-    _, stdout, stderr = self.ssh.exec_command(cmd)
-    err = stderr.readlines()
-    if err:
-      logging.error('command failed.')
-      raise RunCommandError(err)
-    return stdout.readlines()
-
-  def is_file_exist(self, file: str) -> bool:
-    """Checks target file exist.
-
-    Args:
-        file: Target file with absolute path.
-
-    Returns:
-        True if file exist, false otherwise.
-    """
-    return any('exist' in line for line in self.run_cmd(
-        f'if [ -f "{file}" ]; then echo -e "exist"; fi'))
-
-  def sftp_upload(self, src: str, dst: str) -> bool:
-    """Uploads a local file to remote side.
-
-    Args:
-      src: The target file with absolute path.
-      dst: The absolute path to put the file with file name.
-      For example:
-        upload('/usr/local/google/home/zoeyliu/Desktop/sample_config.yml',
-        '/root/sample_config.yml')
-
-    Returns:
-      True if file upload success, False otherwise.
-
-    Raises:
-       NotConnectedError: Raised when run command without SSH connect.
-    """
-    if not self.ssh_is_connected():
-      raise NotConnectedError('ssh remote has not been established')
-    if not self.sftp:
-      raise NotConnectedError('sftp remote has not been established')
-
-    logging.info('[local] %s -> [remote] %s', src, dst)
-    self.sftp.put(src, dst)
-    return self.is_file_exist(dst)
-
-  def sftp_download(self, src: str, dst: str) -> bool:
-    """Downloads a file to local.
-
-    Args:
-      src: The target file with absolute path.
-      dst: The absolute path to put the file.
-
-    Returns:
-      True if file download success, False otherwise.
-
-    Raises:
-       NotConnectedError: Raised when run command without SSH connect.
-    """
-    if not self.ssh_is_connected():
-      raise NotConnectedError('ssh remote has not been established')
-    if not self.sftp:
-      raise NotConnectedError('sftp remote has not been established')
-
-    logging.info('[remote] %s -> [local] %s', src, dst)
-    self.sftp.get(src, dst)
-    return self.is_file_exist(dst)
-
-  def sftp_list_dir(self, path: str) -> Sequence[str]:
-    """Lists the names of the entries in the given path.
-
-    Args:
-      path: The path of the list.
-
-    Returns:
-      The names of the entries in the given path.
-
-    Raises:
-       NotConnectedError: Raised when run command without SSH connect.
-    """
-    if not self.ssh_is_connected():
-      raise NotConnectedError('ssh remote has not been established')
-    if not self.sftp:
-      raise NotConnectedError('sftp remote has not been established')
-    return sorted(self.sftp.listdir(path))
-
diff --git a/src/antlion/controllers/android_device.py b/src/antlion/controllers/android_device.py
index 1668d82..0eb0969 100755
--- a/src/antlion/controllers/android_device.py
+++ b/src/antlion/controllers/android_device.py
@@ -53,11 +53,20 @@
 ANDROID_DEVICE_ADB_LOGCAT_PARAM_KEY = "adb_logcat_param"
 ANDROID_DEVICE_EMPTY_CONFIG_MSG = "Configuration is empty, abort!"
 ANDROID_DEVICE_NOT_LIST_CONFIG_MSG = "Configuration should be a list, abort!"
-CRASH_REPORT_PATHS = ("/data/tombstones/", "/data/vendor/ramdump/",
-                      "/data/ramdump/", "/data/vendor/ssrdump",
-                      "/data/vendor/ramdump/bluetooth", "/data/vendor/log/cbd")
-CRASH_REPORT_SKIPS = ("RAMDUMP_RESERVED", "RAMDUMP_STATUS", "RAMDUMP_OUTPUT",
-                      "bluetooth")
+CRASH_REPORT_PATHS = (
+    "/data/tombstones/",
+    "/data/vendor/ramdump/",
+    "/data/ramdump/",
+    "/data/vendor/ssrdump",
+    "/data/vendor/ramdump/bluetooth",
+    "/data/vendor/log/cbd",
+)
+CRASH_REPORT_SKIPS = (
+    "RAMDUMP_RESERVED",
+    "RAMDUMP_STATUS",
+    "RAMDUMP_OUTPUT",
+    "bluetooth",
+)
 ALWAYS_ON_LOG_PATH = "/data/vendor/radio/logs/always-on"
 DEFAULT_QXDM_LOG_PATH = "/data/vendor/radio/diag_logs"
 DEFAULT_SDM_LOG_PATH = "/data/vendor/slog/"
@@ -72,7 +81,7 @@
 WAIT_FOR_DEVICE_TIMEOUT = 180
 ENCRYPTION_WINDOW = "CryptKeeper"
 DEFAULT_DEVICE_PASSWORD = "1111"
-RELEASE_ID_REGEXES = [re.compile(r'\w+\.\d+\.\d+'), re.compile(r'N\w+')]
+RELEASE_ID_REGEXES = [re.compile(r"\w+\.\d+\.\d+"), re.compile(r"N\w+")]
 
 
 def create(configs):
@@ -90,8 +99,7 @@
     elif configs == ANDROID_DEVICE_PICK_ALL_TOKEN:
         ads = get_all_instances()
     elif not isinstance(configs, list):
-        raise errors.AndroidDeviceConfigError(
-            ANDROID_DEVICE_NOT_LIST_CONFIG_MSG)
+        raise errors.AndroidDeviceConfigError(ANDROID_DEVICE_NOT_LIST_CONFIG_MSG)
     elif isinstance(configs[0], str):
         # Configs is a list of serials.
         ads = get_instances(configs)
@@ -104,9 +112,10 @@
     for ad in ads:
         if not ad.is_connected():
             raise errors.AndroidDeviceError(
-                ("Android device %s is specified in config"
-                 " but is not attached.") % ad.serial,
-                serial=ad.serial)
+                ("Android device %s is specified in config" " but is not attached.")
+                % ad.serial,
+                serial=ad.serial,
+            )
     _start_services_on_ads(ads)
     for ad in ads:
         if ad.droid:
@@ -160,7 +169,7 @@
         try:
             ad.start_services()
         except:
-            ad.log.exception('Failed to start some services, abort!')
+            ad.log.exception("Failed to start some services, abort!")
             destroy(running_ads)
             raise
 
@@ -231,46 +240,50 @@
     results = []
     for c in configs:
         try:
-            serial = c.pop('serial')
+            serial = c.pop("serial")
         except KeyError:
             raise errors.AndroidDeviceConfigError(
-                "Required value 'serial' is missing in AndroidDevice config %s."
-                % c)
+                "Required value 'serial' is missing in AndroidDevice config %s." % c
+            )
         client_port = 0
         if ANDROID_DEVICE_SL4A_CLIENT_PORT_KEY in c:
             try:
                 client_port = int(c.pop(ANDROID_DEVICE_SL4A_CLIENT_PORT_KEY))
             except ValueError:
                 raise errors.AndroidDeviceConfigError(
-                    "'%s' is not a valid number for config %s" %
-                    (ANDROID_DEVICE_SL4A_CLIENT_PORT_KEY, c))
+                    "'%s' is not a valid number for config %s"
+                    % (ANDROID_DEVICE_SL4A_CLIENT_PORT_KEY, c)
+                )
         server_port = None
         if ANDROID_DEVICE_SL4A_SERVER_PORT_KEY in c:
             try:
                 server_port = int(c.pop(ANDROID_DEVICE_SL4A_SERVER_PORT_KEY))
             except ValueError:
                 raise errors.AndroidDeviceConfigError(
-                    "'%s' is not a valid number for config %s" %
-                    (ANDROID_DEVICE_SL4A_SERVER_PORT_KEY, c))
+                    "'%s' is not a valid number for config %s"
+                    % (ANDROID_DEVICE_SL4A_SERVER_PORT_KEY, c)
+                )
         forwarded_port = 0
         if ANDROID_DEVICE_SL4A_FORWARDED_PORT_KEY in c:
             try:
-                forwarded_port = int(
-                    c.pop(ANDROID_DEVICE_SL4A_FORWARDED_PORT_KEY))
+                forwarded_port = int(c.pop(ANDROID_DEVICE_SL4A_FORWARDED_PORT_KEY))
             except ValueError:
                 raise errors.AndroidDeviceConfigError(
-                    "'%s' is not a valid number for config %s" %
-                    (ANDROID_DEVICE_SL4A_FORWARDED_PORT_KEY, c))
-        ssh_config = c.pop('ssh_config', None)
+                    "'%s' is not a valid number for config %s"
+                    % (ANDROID_DEVICE_SL4A_FORWARDED_PORT_KEY, c)
+                )
+        ssh_config = c.pop("ssh_config", None)
         ssh_connection = None
         if ssh_config is not None:
             ssh_settings = settings.from_config(ssh_config)
             ssh_connection = connection.SshConnection(ssh_settings)
-        ad = AndroidDevice(serial,
-                           ssh_connection=ssh_connection,
-                           client_port=client_port,
-                           forwarded_port=forwarded_port,
-                           server_port=server_port)
+        ad = AndroidDevice(
+            serial,
+            ssh_connection=ssh_connection,
+            client_port=client_port,
+            forwarded_port=forwarded_port,
+            server_port=server_port,
+        )
         ad.load_config(c)
         results.append(ad)
     return results
@@ -342,8 +355,8 @@
     filtered = filter_devices(ads, _get_device_filter)
     if not filtered:
         raise ValueError(
-            "Could not find a target device that matches condition: %s." %
-            kwargs)
+            "Could not find a target device that matches condition: %s." % kwargs
+        )
     elif len(filtered) == 1:
         return filtered[0]
     else:
@@ -397,31 +410,32 @@
 
     """
 
-    def __init__(self,
-                 serial='',
-                 ssh_connection=None,
-                 client_port=0,
-                 forwarded_port=0,
-                 server_port=None):
+    def __init__(
+        self,
+        serial="",
+        ssh_connection=None,
+        client_port=0,
+        forwarded_port=0,
+        server_port=None,
+    ):
         self.serial = serial
         # logging.log_path only exists when this is used in an ACTS test run.
-        log_path_base = getattr(logging, 'log_path', '/tmp/logs')
-        self.log_dir = 'AndroidDevice%s' % serial
+        log_path_base = getattr(logging, "log_path", "/tmp/logs")
+        self.log_dir = "AndroidDevice%s" % serial
         self.log_path = os.path.join(log_path_base, self.log_dir)
         self.client_port = client_port
         self.forwarded_port = forwarded_port
         self.server_port = server_port
         self.log = tracelogger.TraceLogger(
-            AndroidDeviceLoggerAdapter(logging.getLogger(),
-                                       {'serial': serial}))
+            AndroidDeviceLoggerAdapter(logging.getLogger(), {"serial": serial})
+        )
         self._event_dispatchers = {}
         self._services = []
         self.register_service(services.AdbLogcatService(self))
         self.register_service(services.Sl4aService(self))
         self.adb_logcat_process = None
         self.adb = adb.AdbProxy(serial, ssh_connection=ssh_connection)
-        self.fastboot = fastboot.FastbootProxy(serial,
-                                               ssh_connection=ssh_connection)
+        self.fastboot = fastboot.FastbootProxy(serial, ssh_connection=ssh_connection)
         if not self.is_bootloader:
             self.root_adb()
         self._ssh_connection = ssh_connection
@@ -474,7 +488,7 @@
         self.start_services()
 
     def register_service(self, service):
-        """Registers the service on the device. """
+        """Registers the service on the device."""
         service.register()
         self._services.append(service)
 
@@ -499,8 +513,9 @@
 
         Stop adb logcat and terminate sl4a sessions if exist.
         """
-        event_bus.post(android_events.AndroidStopServicesEvent(self),
-                       ignore_errors=True)
+        event_bus.post(
+            android_events.AndroidStopServicesEvent(self), ignore_errors=True
+        )
 
     def is_connected(self):
         out = self.adb.devices()
@@ -519,8 +534,7 @@
             device is in bootloader mode.
         """
         if self.is_bootloader:
-            self.log.error("Device is in fastboot mode, could not get build "
-                           "info.")
+            self.log.error("Device is in fastboot mode, could not get build " "info.")
             return
 
         build_id = self.adb.getprop("ro.build.id")
@@ -536,7 +550,7 @@
         info = {
             "build_id": build_id,
             "incremental_build_id": incremental_build_id,
-            "build_type": self.adb.getprop("ro.build.type")
+            "build_type": self.adb.getprop("ro.build.type"),
         }
         return info
 
@@ -548,11 +562,11 @@
         can be added via `add_device_info`.
         """
         info = {
-            'serial': self.serial,
-            'model': self.model,
-            'build_info': self.build_info,
-            'user_added_info': self._user_added_device_info,
-            'flavor': self.flavor
+            "serial": self.serial,
+            "model": self.model,
+            "build_info": self.build_info,
+            "user_added_info": self._user_added_device_info,
+            "flavor": self.flavor,
         }
         return info
 
@@ -571,23 +585,19 @@
         if self._sdk_api_level is not None:
             return self._sdk_api_level
         if self.is_bootloader:
-            self.log.error(
-                'Device is in fastboot mode. Cannot get build info.')
+            self.log.error("Device is in fastboot mode. Cannot get build info.")
             return
-        self._sdk_api_level = int(
-            self.adb.shell('getprop ro.build.version.sdk'))
+        self._sdk_api_level = int(self.adb.shell("getprop ro.build.version.sdk"))
         return self._sdk_api_level
 
     @property
     def is_bootloader(self):
-        """True if the device is in bootloader mode.
-        """
+        """True if the device is in bootloader mode."""
         return self.serial in list_fastboot_devices()
 
     @property
     def is_adb_root(self):
-        """True if adb is running as root for this device.
-        """
+        """True if adb is running as root for this device."""
         try:
             return "0" == self.adb.shell("id -u")
         except AdbError:
@@ -603,9 +613,9 @@
             out = self.fastboot.getvar("product").strip()
             # "out" is never empty because of the "total time" message fastboot
             # writes to stderr.
-            lines = out.split('\n', 1)
+            lines = out.split("\n", 1)
             if lines:
-                tokens = lines[0].split(' ')
+                tokens = lines[0].split(" ")
                 if len(tokens) > 1:
                     return tokens[1].lower()
             return None
@@ -634,8 +644,7 @@
         """Returns the event dispatcher of the first Sl4aSession created."""
         if len(self._sl4a_manager.sessions) > 0:
             session_id = sorted(self._sl4a_manager.sessions.keys())[0]
-            return self._sl4a_manager.sessions[
-                session_id].get_event_dispatcher()
+            return self._sl4a_manager.sessions[session_id].get_event_dispatcher()
         else:
             return None
 
@@ -646,8 +655,7 @@
 
     @property
     def is_adb_logcat_on(self):
-        """Whether there is an ongoing adb logcat collection.
-        """
+        """Whether there is an ongoing adb logcat collection."""
         if self.adb_logcat_process:
             if self.adb_logcat_process.is_running():
                 return True
@@ -656,7 +664,7 @@
                 # if logcat died due to device reboot and sl4a session has
                 # not restarted there is no droid.
                 if self.droid:
-                    self.droid.logI('Logcat died')
+                    self.droid.logI("Logcat died")
                 self.log.info("Logcat to %s died", self.log_path)
                 return False
         return False
@@ -686,9 +694,9 @@
             # skip_sl4a value can be reset from config file
             if hasattr(self, k) and k != "skip_sl4a":
                 raise errors.AndroidDeviceError(
-                    "Attempting to set existing attribute %s on %s" %
-                    (k, self.serial),
-                    serial=self.serial)
+                    "Attempting to set existing attribute %s on %s" % (k, self.serial),
+                    serial=self.serial,
+                )
             setattr(self, k, v)
 
     def root_adb(self):
@@ -702,7 +710,7 @@
 
         for attempt in range(ADB_ROOT_RETRY_COUNT):
             try:
-                self.log.debug('Enabling ADB root mode: attempt %d.' % attempt)
+                self.log.debug("Enabling ADB root mode: attempt %d." % attempt)
                 self.adb.root()
             except AdbError:
                 if attempt == ADB_ROOT_RETRY_COUNT:
@@ -737,12 +745,15 @@
             >>> droid, ed = ad.get_droid()
         """
         self.log.debug(
-            "Creating RPC client_port={}, forwarded_port={}, server_port={}".
-            format(self.client_port, self.forwarded_port, self.server_port))
+            "Creating RPC client_port={}, forwarded_port={}, server_port={}".format(
+                self.client_port, self.forwarded_port, self.server_port
+            )
+        )
         session = self._sl4a_manager.create_session(
             client_port=self.client_port,
             forwarded_port=self.forwarded_port,
-            server_port=self.server_port)
+            server_port=self.server_port,
+        )
         droid = session.rpc_client
         if handle_event:
             ed = session.get_event_dispatcher()
@@ -762,24 +773,31 @@
         """
         for cmd in ("ps -A", "ps"):
             try:
-                out = self.adb.shell('%s | grep "S %s"' % (cmd, package_name),
-                                     ignore_status=True)
+                out = self.adb.shell(
+                    '%s | grep "S %s"' % (cmd, package_name), ignore_status=True
+                )
                 if package_name not in out:
                     continue
                 try:
                     pid = int(out.split()[1])
-                    self.log.info('apk %s has pid %s.', package_name, pid)
+                    self.log.info("apk %s has pid %s.", package_name, pid)
                     return pid
                 except (IndexError, ValueError) as e:
                     # Possible ValueError from string to int cast.
                     # Possible IndexError from split.
                     self.log.warning(
-                        'Command \"%s\" returned output line: '
-                        '\"%s\".\nError: %s', cmd, out, e)
+                        'Command "%s" returned output line: ' '"%s".\nError: %s',
+                        cmd,
+                        out,
+                        e,
+                    )
             except Exception as e:
                 self.log.warning(
-                    'Device fails to check if %s running with \"%s\"\n'
-                    'Exception %s', package_name, cmd, e)
+                    'Device fails to check if %s running with "%s"\n' "Exception %s",
+                    package_name,
+                    cmd,
+                    e,
+                )
         self.log.debug("apk %s is not running", package_name)
         return None
 
@@ -795,17 +813,11 @@
         return self._sl4a_manager.sessions[droid.uid].get_event_dispatcher()
 
     def _is_timestamp_in_range(self, target, log_begin_time, log_end_time):
-        low = acts_logger.logline_timestamp_comparator(log_begin_time,
-                                                       target) <= 0
-        high = acts_logger.logline_timestamp_comparator(log_end_time,
-                                                        target) >= 0
+        low = acts_logger.logline_timestamp_comparator(log_begin_time, target) <= 0
+        high = acts_logger.logline_timestamp_comparator(log_end_time, target) >= 0
         return low and high
 
-    def cat_adb_log(self,
-                    tag,
-                    begin_time,
-                    end_time=None,
-                    dest_path="AdbLogExcerpts"):
+    def cat_adb_log(self, tag, begin_time, end_time=None, dest_path="AdbLogExcerpts"):
         """Takes an excerpt of the adb logcat log from a certain time point to
         current time.
 
@@ -821,21 +833,24 @@
         else:
             log_end_time = acts_logger.epoch_to_log_line_timestamp(end_time)
         self.log.debug("Extracting adb log from logcat.")
-        logcat_path = os.path.join(self.device_log_path,
-                                   'adblog_%s_debug.txt' % self.serial)
+        logcat_path = os.path.join(
+            self.device_log_path, "adblog_%s_debug.txt" % self.serial
+        )
         if not os.path.exists(logcat_path):
             self.log.warning("Logcat file %s does not exist." % logcat_path)
             return
         adb_excerpt_dir = os.path.join(self.log_path, dest_path)
         os.makedirs(adb_excerpt_dir, exist_ok=True)
-        out_name = '%s,%s.txt' % (acts_logger.normalize_log_line_timestamp(
-            log_begin_time), self.serial)
+        out_name = "%s,%s.txt" % (
+            acts_logger.normalize_log_line_timestamp(log_begin_time),
+            self.serial,
+        )
         tag_len = utils.MAX_FILENAME_LEN - len(out_name)
-        out_name = '%s,%s' % (tag[:tag_len], out_name)
+        out_name = "%s,%s" % (tag[:tag_len], out_name)
         adb_excerpt_path = os.path.join(adb_excerpt_dir, out_name)
-        with open(adb_excerpt_path, 'w', encoding='utf-8') as out:
+        with open(adb_excerpt_path, "w", encoding="utf-8") as out:
             in_file = logcat_path
-            with open(in_file, 'r', encoding='utf-8', errors='replace') as f:
+            with open(in_file, "r", encoding="utf-8", errors="replace") as f:
                 while True:
                     line = None
                     try:
@@ -844,21 +859,20 @@
                             break
                     except:
                         continue
-                    line_time = line[:acts_logger.log_line_timestamp_len]
+                    line_time = line[: acts_logger.log_line_timestamp_len]
                     if not acts_logger.is_valid_logline_timestamp(line_time):
                         continue
-                    if self._is_timestamp_in_range(line_time, log_begin_time,
-                                                   log_end_time):
-                        if not line.endswith('\n'):
-                            line += '\n'
+                    if self._is_timestamp_in_range(
+                        line_time, log_begin_time, log_end_time
+                    ):
+                        if not line.endswith("\n"):
+                            line += "\n"
                         out.write(line)
         return adb_excerpt_path
 
-    def search_logcat(self,
-                      matching_string,
-                      begin_time=None,
-                      end_time=None,
-                      logcat_path=None):
+    def search_logcat(
+        self, matching_string, begin_time=None, end_time=None, logcat_path=None
+    ):
         """Search logcat message with given string.
 
         Args:
@@ -887,29 +901,27 @@
               "message_id": "0853"}]
         """
         if not logcat_path:
-            logcat_path = os.path.join(self.device_log_path,
-                                       'adblog_%s_debug.txt' % self.serial)
+            logcat_path = os.path.join(
+                self.device_log_path, "adblog_%s_debug.txt" % self.serial
+            )
         if not os.path.exists(logcat_path):
             self.log.warning("Logcat file %s does not exist." % logcat_path)
             return
-        output = job.run("grep '%s' %s" % (matching_string, logcat_path),
-                         ignore_status=True)
+        output = job.run(
+            "grep '%s' %s" % (matching_string, logcat_path), ignore_status=True
+        )
         if not output.stdout or output.exit_status != 0:
             return []
         if begin_time:
             if not isinstance(begin_time, datetime):
-                log_begin_time = acts_logger.epoch_to_log_line_timestamp(
-                    begin_time)
-                begin_time = datetime.strptime(log_begin_time,
-                                               "%Y-%m-%d %H:%M:%S.%f")
+                log_begin_time = acts_logger.epoch_to_log_line_timestamp(begin_time)
+                begin_time = datetime.strptime(log_begin_time, "%Y-%m-%d %H:%M:%S.%f")
         if end_time:
             if not isinstance(end_time, datetime):
-                log_end_time = acts_logger.epoch_to_log_line_timestamp(
-                    end_time)
-                end_time = datetime.strptime(log_end_time,
-                                             "%Y-%m-%d %H:%M:%S.%f")
+                log_end_time = acts_logger.epoch_to_log_line_timestamp(end_time)
+                end_time = datetime.strptime(log_end_time, "%Y-%m-%d %H:%M:%S.%f")
         result = []
-        logs = re.findall(r'(\S+\s\S+)(.*)', output.stdout)
+        logs = re.findall(r"(\S+\s\S+)(.*)", output.stdout)
         for log in logs:
             time_stamp = log[0]
             time_obj = datetime.strptime(time_stamp, "%Y-%m-%d %H:%M:%S.%f")
@@ -920,18 +932,20 @@
             if end_time and time_obj > end_time:
                 continue
 
-            res = re.findall(r'.*\[(\d+)\]', log[1])
+            res = re.findall(r".*\[(\d+)\]", log[1])
             try:
                 message_id = res[0]
             except:
                 message_id = None
 
-            result.append({
-                "log_message": "".join(log),
-                "time_stamp": time_stamp,
-                "datetime_obj": time_obj,
-                "message_id": message_id
-            })
+            result.append(
+                {
+                    "log_message": "".join(log),
+                    "time_stamp": time_stamp,
+                    "datetime_obj": time_obj,
+                    "message_id": message_id,
+                }
+            )
         return result
 
     def start_adb_logcat(self):
@@ -940,29 +954,30 @@
         """
         if self.is_adb_logcat_on:
             self.log.warning(
-                'Android device %s already has a running adb logcat thread. ' %
-                self.serial)
+                "Android device %s already has a running adb logcat thread. "
+                % self.serial
+            )
             return
         # Disable adb log spam filter. Have to stop and clear settings first
         # because 'start' doesn't support --clear option before Android N.
         self.adb.shell("logpersist.stop --clear", ignore_status=True)
         self.adb.shell("logpersist.start", ignore_status=True)
-        if hasattr(self, 'adb_logcat_param'):
+        if hasattr(self, "adb_logcat_param"):
             extra_params = self.adb_logcat_param
         else:
             extra_params = "-b all"
 
         self.adb_logcat_process = logcat.create_logcat_keepalive_process(
-            self.serial, self.log_dir, extra_params)
+            self.serial, self.log_dir, extra_params
+        )
         self.adb_logcat_process.start()
 
     def stop_adb_logcat(self):
-        """Stops the adb logcat collection subprocess.
-        """
+        """Stops the adb logcat collection subprocess."""
         if not self.is_adb_logcat_on:
             self.log.warning(
-                'Android device %s does not have an ongoing adb logcat ' %
-                self.serial)
+                "Android device %s does not have an ongoing adb logcat " % self.serial
+            )
             return
         # Set the last timestamp to the current timestamp. This may cause
         # a race condition that allows the same line to be logged twice,
@@ -979,8 +994,9 @@
         Returns:
         Linux UID for the apk.
         """
-        output = self.adb.shell("dumpsys package %s | grep userId=" % apk_name,
-                                ignore_status=True)
+        output = self.adb.shell(
+            "dumpsys package %s | grep userId=" % apk_name, ignore_status=True
+        )
         result = re.search(r"userId=(\d+)", output)
         if result:
             return result.group(1)
@@ -997,15 +1013,17 @@
             Version of the given apk.
         """
         try:
-            output = self.adb.shell("dumpsys package %s | grep versionName" %
-                                    package_name)
+            output = self.adb.shell(
+                "dumpsys package %s | grep versionName" % package_name
+            )
             pattern = re.compile(r"versionName=(.+)", re.I)
             result = pattern.findall(output)
             if result:
                 return result[0]
         except Exception as e:
-            self.log.warning("Fail to get the version of package %s: %s",
-                             package_name, e)
+            self.log.warning(
+                "Fail to get the version of package %s: %s", package_name, e
+            )
         self.log.debug("apk %s is not found", package_name)
         return None
 
@@ -1022,13 +1040,16 @@
         try:
             return bool(
                 self.adb.shell(
-                    '(pm list packages | grep -w "package:%s") || true' %
-                    package_name))
+                    '(pm list packages | grep -w "package:%s") || true' % package_name
+                )
+            )
 
         except Exception as err:
             self.log.error(
-                'Could not determine if %s is installed. '
-                'Received error:\n%s', package_name, err)
+                "Could not determine if %s is installed. " "Received error:\n%s",
+                package_name,
+                err,
+            )
             return False
 
     def is_sl4a_installed(self):
@@ -1045,15 +1066,19 @@
         """
         for cmd in ("ps -A", "ps"):
             try:
-                out = self.adb.shell('%s | grep "S %s"' % (cmd, package_name),
-                                     ignore_status=True)
+                out = self.adb.shell(
+                    '%s | grep "S %s"' % (cmd, package_name), ignore_status=True
+                )
                 if package_name in out:
                     self.log.info("apk %s is running", package_name)
                     return True
             except Exception as e:
                 self.log.warning(
-                    "Device fails to check is %s running by %s "
-                    "Exception %s", package_name, cmd, e)
+                    "Device fails to check is %s running by %s " "Exception %s",
+                    package_name,
+                    cmd,
+                    e,
+                )
                 continue
         self.log.debug("apk %s is not running", package_name)
         return False
@@ -1071,8 +1096,7 @@
         True if package is installed. False otherwise.
         """
         try:
-            self.adb.shell('am force-stop %s' % package_name,
-                           ignore_status=True)
+            self.adb.shell("am force-stop %s" % package_name, ignore_status=True)
         except Exception as e:
             self.log.warning("Fail to stop package %s: %s", package_name, e)
 
@@ -1098,7 +1122,8 @@
         os.makedirs(br_path, exist_ok=True)
         epoch = begin_time if begin_time else utils.get_current_epoch_time()
         time_stamp = acts_logger.normalize_log_line_timestamp(
-            acts_logger.epoch_to_log_line_timestamp(epoch))
+            acts_logger.epoch_to_log_line_timestamp(epoch)
+        )
         out_name = "AndroidDevice%s_%s" % (self.serial, time_stamp)
         out_name = "%s.zip" % out_name if new_br else "%s.txt" % out_name
         full_out_path = os.path.join(br_path, out_name)
@@ -1112,25 +1137,24 @@
             out = self.adb.shell("bugreportz", timeout=BUG_REPORT_TIMEOUT)
             if not out.startswith("OK"):
                 raise errors.AndroidDeviceError(
-                    'Failed to take bugreport on %s: %s' % (self.serial, out),
-                    serial=self.serial)
-            br_out_path = out.split(':')[1].strip().split()[0]
+                    "Failed to take bugreport on %s: %s" % (self.serial, out),
+                    serial=self.serial,
+                )
+            br_out_path = out.split(":")[1].strip().split()[0]
             self.adb.pull("%s %s" % (br_out_path, full_out_path))
         else:
-            self.adb.bugreport(" > {}".format(full_out_path),
-                               timeout=BUG_REPORT_TIMEOUT)
+            self.adb.bugreport(
+                " > {}".format(full_out_path), timeout=BUG_REPORT_TIMEOUT
+            )
         if test_name:
-            self.log.info("Bugreport for %s taken at %s.", test_name,
-                          full_out_path)
+            self.log.info("Bugreport for %s taken at %s.", test_name, full_out_path)
         else:
-            self.log.info("Bugreport taken at %s.", test_name, full_out_path)
+            self.log.info("Bugreport taken at %s.", full_out_path)
         self.adb.wait_for_device(timeout=WAIT_FOR_DEVICE_TIMEOUT)
 
-    def get_file_names(self,
-                       directory,
-                       begin_time=None,
-                       skip_files=[],
-                       match_string=None):
+    def get_file_names(
+        self, directory, begin_time=None, skip_files=[], match_string=None
+    ):
         """Get files names with provided directory."""
         cmd = "find %s -type f" % directory
         if begin_time:
@@ -1142,8 +1166,12 @@
         for skip_file in skip_files:
             cmd = "%s ! -iname %s" % (cmd, skip_file)
         out = self.adb.shell(cmd, ignore_status=True)
-        if not out or "No such" in out or "Permission denied" in out or \
-            "Not a directory" in out:
+        if (
+            not out
+            or "No such" in out
+            or "Permission denied" in out
+            or "Not a directory" in out
+        ):
             return []
         files = out.split("\n")
         self.log.debug("Find files in directory %s: %s", directory, files)
@@ -1154,7 +1182,7 @@
         """
         The $EXTERNAL_STORAGE path on the device. Most commonly set to '/sdcard'
         """
-        return self.adb.shell('echo $EXTERNAL_STORAGE')
+        return self.adb.shell("echo $EXTERNAL_STORAGE")
 
     def file_exists(self, file_path):
         """Returns whether a file exists on a device.
@@ -1162,14 +1190,16 @@
         Args:
             file_path: The path of the file to check for.
         """
-        cmd = '(test -f %s && echo yes) || echo no' % file_path
+        cmd = "(test -f %s && echo yes) || echo no" % file_path
         result = self.adb.shell(cmd)
-        if result == 'yes':
+        if result == "yes":
             return True
-        elif result == 'no':
+        elif result == "no":
             return False
-        raise ValueError('Couldn\'t determine if %s exists. '
-                         'Expected yes/no, got %s' % (file_path, result[cmd]))
+        raise ValueError(
+            "Couldn't determine if %s exists. "
+            "Expected yes/no, got %s" % (file_path, result)
+        )
 
     def pull_files(self, device_paths, host_path=None):
         """Pull files from devices.
@@ -1183,39 +1213,37 @@
         if not host_path:
             host_path = self.log_path
         for device_path in device_paths:
-            self.log.info('Pull from device: %s -> %s' %
-                          (device_path, host_path))
-            self.adb.pull("%s %s" % (device_path, host_path),
-                          timeout=PULL_TIMEOUT)
+            self.log.info("Pull from device: %s -> %s" % (device_path, host_path))
+            self.adb.pull("%s %s" % (device_path, host_path), timeout=PULL_TIMEOUT)
 
-    def check_crash_report(self,
-                           test_name=None,
-                           begin_time=None,
-                           log_crash_report=False):
+    def check_crash_report(
+        self, test_name=None, begin_time=None, log_crash_report=False
+    ):
         """check crash report on the device."""
         crash_reports = []
         for crash_path in CRASH_REPORT_PATHS:
             try:
-                cmd = 'cd %s' % crash_path
+                cmd = "cd %s" % crash_path
                 self.adb.shell(cmd)
             except Exception as e:
                 self.log.debug("received exception %s", e)
                 continue
-            crashes = self.get_file_names(crash_path,
-                                          skip_files=CRASH_REPORT_SKIPS,
-                                          begin_time=begin_time)
+            crashes = self.get_file_names(
+                crash_path, skip_files=CRASH_REPORT_SKIPS, begin_time=begin_time
+            )
             if crash_path == "/data/tombstones/" and crashes:
                 tombstones = crashes[:]
                 for tombstone in tombstones:
                     if self.adb.shell(
-                            'cat %s | grep "crash_dump failed to dump process"'
-                            % tombstone):
+                        'cat %s | grep "crash_dump failed to dump process"' % tombstone
+                    ):
                         crashes.remove(tombstone)
             if crashes:
                 crash_reports.extend(crashes)
         if crash_reports and log_crash_report:
-            crash_log_path = os.path.join(self.device_log_path,
-                                          "Crashes_%s" % self.serial)
+            crash_log_path = os.path.join(
+                self.device_log_path, "Crashes_%s" % self.serial
+            )
             os.makedirs(crash_log_path, exist_ok=True)
             self.pull_files(crash_reports, crash_log_path)
         return crash_reports
@@ -1225,35 +1253,38 @@
         # Sleep 10 seconds for the buffered log to be written in qxdm log file
         time.sleep(10)
         log_path = getattr(self, "qxdm_log_path", DEFAULT_QXDM_LOG_PATH)
-        qxdm_logs = self.get_file_names(log_path,
-                                        begin_time=begin_time,
-                                        match_string="*.qmdl")
+        qxdm_logs = self.get_file_names(
+            log_path, begin_time=begin_time, match_string="*.qmdl"
+        )
         if qxdm_logs:
-            qxdm_log_path = os.path.join(self.device_log_path,
-                                         "QXDM_%s" % self.serial)
+            qxdm_log_path = os.path.join(self.device_log_path, "QXDM_%s" % self.serial)
             os.makedirs(qxdm_log_path, exist_ok=True)
 
             self.log.info("Pull QXDM Log %s to %s", qxdm_logs, qxdm_log_path)
             self.pull_files(qxdm_logs, qxdm_log_path)
 
-            self.adb.pull("/firmware/image/qdsp6m.qdb %s" % qxdm_log_path,
-                          timeout=PULL_TIMEOUT,
-                          ignore_status=True)
+            self.adb.pull(
+                "/firmware/image/qdsp6m.qdb %s" % qxdm_log_path,
+                timeout=PULL_TIMEOUT,
+                ignore_status=True,
+            )
             # Zip Folder
-            utils.zip_directory('%s.zip' % qxdm_log_path, qxdm_log_path)
+            utils.zip_directory("%s.zip" % qxdm_log_path, qxdm_log_path)
             shutil.rmtree(qxdm_log_path)
         else:
             self.log.error("Didn't find QXDM logs in %s." % log_path)
         if "Verizon" in self.adb.getprop("gsm.sim.operator.alpha"):
-            omadm_log_path = os.path.join(self.device_log_path,
-                                          "OMADM_%s" % self.serial)
+            omadm_log_path = os.path.join(
+                self.device_log_path, "OMADM_%s" % self.serial
+            )
             os.makedirs(omadm_log_path, exist_ok=True)
             self.log.info("Pull OMADM Log")
             self.adb.pull(
-                "/data/data/com.android.omadm.service/files/dm/log/ %s" %
-                omadm_log_path,
+                "/data/data/com.android.omadm.service/files/dm/log/ %s"
+                % omadm_log_path,
                 timeout=PULL_TIMEOUT,
-                ignore_status=True)
+                ignore_status=True,
+            )
 
     def get_sdm_logs(self, test_name="", begin_time=None):
         """Get sdm logs."""
@@ -1261,31 +1292,32 @@
         time.sleep(10)
         log_paths = [
             ALWAYS_ON_LOG_PATH,
-            getattr(self, "sdm_log_path", DEFAULT_SDM_LOG_PATH)
+            getattr(self, "sdm_log_path", DEFAULT_SDM_LOG_PATH),
         ]
         sdm_logs = []
         for path in log_paths:
-            sdm_logs += self.get_file_names(path,
-                                            begin_time=begin_time,
-                                            match_string="*.sdm*")
+            sdm_logs += self.get_file_names(
+                path, begin_time=begin_time, match_string="*.sdm*"
+            )
         if sdm_logs:
-            sdm_log_path = os.path.join(self.device_log_path,
-                                        "SDM_%s" % self.serial)
+            sdm_log_path = os.path.join(self.device_log_path, "SDM_%s" % self.serial)
             os.makedirs(sdm_log_path, exist_ok=True)
             self.log.info("Pull SDM Log %s to %s", sdm_logs, sdm_log_path)
             self.pull_files(sdm_logs, sdm_log_path)
         else:
             self.log.error("Didn't find SDM logs in %s." % log_paths)
         if "Verizon" in self.adb.getprop("gsm.sim.operator.alpha"):
-            omadm_log_path = os.path.join(self.device_log_path,
-                                          "OMADM_%s" % self.serial)
+            omadm_log_path = os.path.join(
+                self.device_log_path, "OMADM_%s" % self.serial
+            )
             os.makedirs(omadm_log_path, exist_ok=True)
             self.log.info("Pull OMADM Log")
             self.adb.pull(
-                "/data/data/com.android.omadm.service/files/dm/log/ %s" %
-                omadm_log_path,
+                "/data/data/com.android.omadm.service/files/dm/log/ %s"
+                % omadm_log_path,
                 timeout=PULL_TIMEOUT,
-                ignore_status=True)
+                ignore_status=True,
+            )
 
     def start_new_session(self, max_connections=None, server_port=None):
         """Start a new session in sl4a.
@@ -1301,7 +1333,8 @@
             existing uid to a new session.
         """
         session = self._sl4a_manager.create_session(
-            max_connections=max_connections, server_port=server_port)
+            max_connections=max_connections, server_port=server_port
+        )
 
         self._sl4a_manager.sessions[session.uid] = session
         return session.rpc_client
@@ -1313,11 +1346,9 @@
         """
         self._sl4a_manager.terminate_all_sessions()
 
-    def run_iperf_client_nb(self,
-                            server_host,
-                            extra_args="",
-                            timeout=IPERF_TIMEOUT,
-                            log_file_path=None):
+    def run_iperf_client_nb(
+        self, server_host, extra_args="", timeout=IPERF_TIMEOUT, log_file_path=None
+    ):
         """Start iperf client on the device asynchronously.
 
         Return status as true if iperf client start successfully.
@@ -1335,10 +1366,7 @@
             cmd += " --logfile {} &".format(log_file_path)
         self.adb.shell_nb(cmd)
 
-    def run_iperf_client(self,
-                         server_host,
-                         extra_args="",
-                         timeout=IPERF_TIMEOUT):
+    def run_iperf_client(self, server_host, extra_args="", timeout=IPERF_TIMEOUT):
         """Start iperf client on the device.
 
         Return status as true if iperf client start successfully.
@@ -1353,9 +1381,10 @@
             status: true if iperf client start successfully.
             results: results have data flow information
         """
-        out = self.adb.shell("iperf3 -c {} {}".format(server_host, extra_args),
-                             timeout=timeout)
-        clean_out = out.split('\n')
+        out = self.adb.shell(
+            "iperf3 -c {} {}".format(server_host, extra_args), timeout=timeout
+        )
+        clean_out = out.split("\n")
         if "error" in clean_out[0].lower():
             return False, clean_out
         return True, clean_out
@@ -1373,7 +1402,7 @@
             results: results have output of command
         """
         out = self.adb.shell("iperf3 -s {}".format(extra_args))
-        clean_out = out.split('\n')
+        clean_out = out.split("\n")
         if "error" in clean_out[0].lower():
             return False, clean_out
         return True, clean_out
@@ -1393,7 +1422,7 @@
         while time.time() < timeout_start + timeout:
             try:
                 completed = self.adb.getprop("sys.boot_completed")
-                if completed == '1':
+                if completed == "1":
                     self.log.debug("Device has rebooted")
                     return
             except AdbError:
@@ -1402,13 +1431,12 @@
                 pass
             time.sleep(5)
         raise errors.AndroidDeviceError(
-            'Device %s booting process timed out.' % self.serial,
-            serial=self.serial)
+            "Device %s booting process timed out." % self.serial, serial=self.serial
+        )
 
-    def reboot(self,
-               stop_at_lock_screen=False,
-               timeout=180,
-               wait_after_reboot_complete=1):
+    def reboot(
+        self, stop_at_lock_screen=False, timeout=180, wait_after_reboot_complete=1
+    ):
         """Reboots the device.
 
         Terminate all sl4a sessions, reboot the device, wait for device to
@@ -1438,16 +1466,15 @@
         while time.time() < timeout_start + timeout:
             try:
                 self.adb.get_state()
-                time.sleep(.1)
+                time.sleep(0.1)
             except AdbError:
                 # get_state will raise an error if the device is not found. We
                 # want the device to be missing to prove the device has kicked
                 # off the reboot.
                 break
-        self.wait_for_boot_completion(timeout=(timeout - time.time() +
-                                               timeout_start))
+        self.wait_for_boot_completion(timeout=(timeout - time.time() + timeout_start))
 
-        self.log.debug('Wait for a while after boot completion.')
+        self.log.debug("Wait for a while after boot completion.")
         time.sleep(wait_after_reboot_complete)
         self.root_adb()
         skip_sl4a = self.skip_sl4a
@@ -1473,19 +1500,18 @@
 
         self.start_services()
 
-    def get_ipv4_address(self, interface='wlan0', timeout=5):
+    def get_ipv4_address(self, interface="wlan0", timeout=5):
         for timer in range(0, timeout):
             try:
-                ip_string = self.adb.shell('ifconfig %s|grep inet' % interface)
+                ip_string = self.adb.shell("ifconfig %s|grep inet" % interface)
                 break
             except adb.AdbError as e:
                 if timer + 1 == timeout:
-                    self.log.warning('Unable to find IP address for %s.' %
-                                     interface)
+                    self.log.warning("Unable to find IP address for %s." % interface)
                     return None
                 else:
                     time.sleep(1)
-        result = re.search('addr:(.*) Bcast', ip_string)
+        result = re.search("addr:(.*) Bcast", ip_string)
         if result != None:
             ip_address = result.group(1)
             try:
@@ -1499,16 +1525,15 @@
     def get_ipv4_gateway(self, timeout=5):
         for timer in range(0, timeout):
             try:
-                gateway_string = self.adb.shell(
-                    'dumpsys wifi | grep mDhcpResults')
+                gateway_string = self.adb.shell("dumpsys wifi | grep mDhcpResults")
                 break
             except adb.AdbError as e:
                 if timer + 1 == timeout:
-                    self.log.warning('Unable to find gateway')
+                    self.log.warning("Unable to find gateway")
                     return None
                 else:
                     time.sleep(1)
-        result = re.search('Gateway (.*) DNS servers', gateway_string)
+        result = re.search("Gateway (.*) DNS servers", gateway_string)
         if result != None:
             ipv4_gateway = result.group(1)
             try:
@@ -1525,28 +1550,33 @@
     def get_my_current_focus_window(self):
         """Get the current focus window on screen"""
         output = self.adb.shell(
-            'dumpsys window displays | grep -E mCurrentFocus | grep -v null',
-            ignore_status=True)
+            "dumpsys window displays | grep -E mCurrentFocus | grep -v null",
+            ignore_status=True,
+        )
         if not output or "not found" in output or "Can't find" in output:
-            result = ''
+            result = ""
         else:
-            result = output.split(' ')[-1].strip("}")
+            result = output.split(" ")[-1].strip("}")
         self.log.debug("Current focus window is %s", result)
         return result
 
     def get_my_current_focus_app(self):
         """Get the current focus application"""
         dumpsys_cmd = [
-            'dumpsys window | grep -E mFocusedApp',
-            'dumpsys window displays | grep -E mFocusedApp'
+            "dumpsys window | grep -E mFocusedApp",
+            "dumpsys window displays | grep -E mFocusedApp",
         ]
         for cmd in dumpsys_cmd:
             output = self.adb.shell(cmd, ignore_status=True)
-            if not output or "not found" in output or "Can't find" in output or (
-                    "mFocusedApp=null" in output):
-                result = ''
+            if (
+                not output
+                or "not found" in output
+                or "Can't find" in output
+                or ("mFocusedApp=null" in output)
+            ):
+                result = ""
             else:
-                result = output.split(' ')[-2]
+                result = output.split(" ")[-2]
                 break
         self.log.debug("Current focus app is %s", result)
         return result
@@ -1557,18 +1587,16 @@
             return window_name in current_window
         return current_window and ENCRYPTION_WINDOW not in current_window
 
-    def wait_for_window_ready(self,
-                              window_name=None,
-                              check_interval=5,
-                              check_duration=60):
+    def wait_for_window_ready(
+        self, window_name=None, check_interval=5, check_duration=60
+    ):
         elapsed_time = 0
         while elapsed_time < check_duration:
             if self.is_window_ready(window_name=window_name):
                 return True
             time.sleep(check_interval)
             elapsed_time += check_interval
-        self.log.info("Current focus window is %s",
-                      self.get_my_current_focus_window())
+        self.log.info("Current focus window is %s", self.get_my_current_focus_window())
         return False
 
     def is_user_setup_complete(self):
@@ -1592,7 +1620,7 @@
 
     def is_screen_lock_enabled(self):
         """Check if screen lock is enabled"""
-        cmd = ("dumpsys window policy | grep showing=")
+        cmd = "dumpsys window policy | grep showing="
         out = self.adb.shell(cmd, ignore_status=True)
         return "true" in out
 
@@ -1604,7 +1632,8 @@
             self.log.info("Device is in CrpytKeeper window")
             return True
         if "StatusBar" in current_window and (
-            (not current_app) or "FallbackHome" in current_app):
+            (not current_app) or "FallbackHome" in current_app
+        ):
             self.log.info("Device is locked")
             return True
         return False
@@ -1618,8 +1647,10 @@
                 if self.is_waiting_for_unlock_pin():
                     self.unlock_screen(password=DEFAULT_DEVICE_PASSWORD)
                     time.sleep(1)
-                if not self.is_waiting_for_unlock_pin(
-                ) and self.wait_for_window_ready():
+                if (
+                    not self.is_waiting_for_unlock_pin()
+                    and self.wait_for_window_ready()
+                ):
                     return True
             return False
         else:
@@ -1671,16 +1702,19 @@
 
     def exit_setup_wizard(self):
         # Handling Android TV's setupwizard is ignored for now.
-        if 'feature:android.hardware.type.television' in self.adb.shell(
-                'pm list features'):
+        if "feature:android.hardware.type.television" in self.adb.shell(
+            "pm list features"
+        ):
             return
         if not self.is_user_setup_complete() or self.is_setupwizard_on():
             # b/116709539 need this to prevent reboot after skip setup wizard
-            self.adb.shell("am start -a com.android.setupwizard.EXIT",
-                           ignore_status=True)
-            self.adb.shell("pm disable %s" %
-                           self.get_setupwizard_package_name(),
-                           ignore_status=True)
+            self.adb.shell(
+                "am start -a com.android.setupwizard.EXIT", ignore_status=True
+            )
+            self.adb.shell(
+                "pm disable %s" % self.get_setupwizard_package_name(),
+                ignore_status=True,
+            )
         # Wait up to 5 seconds for user_setup_complete to be updated
         end_time = time.time() + 5
         while time.time() < end_time:
@@ -1705,9 +1739,11 @@
         android_package_name = "com.google.android"
         package = self.adb.shell(
             "pm list packages -f | grep -E {} | grep {}".format(
-                packages_to_skip, android_package_name))
-        wizard_package = package.split('=')[1]
-        activity = package.split('=')[0].split('/')[-2]
+                packages_to_skip, android_package_name
+            )
+        )
+        wizard_package = package.split("=")[1]
+        activity = package.split("=")[0].split("/")[-2]
         self.log.info("%s/.%sActivity" % (wizard_package, activity))
         return "%s/.%sActivity" % (wizard_package, activity)
 
@@ -1728,16 +1764,25 @@
         try:
             self.ensure_verity_disabled()
             self.adb.remount()
-            out = self.adb.push('%s %s' % (src_file_path, dst_file_path),
-                                timeout=push_timeout)
-            if 'error' in out:
-                self.log.error('Unable to push system file %s to %s due to %s',
-                               src_file_path, dst_file_path, out)
+            out = self.adb.push(
+                "%s %s" % (src_file_path, dst_file_path), timeout=push_timeout
+            )
+            if "error" in out:
+                self.log.error(
+                    "Unable to push system file %s to %s due to %s",
+                    src_file_path,
+                    dst_file_path,
+                    out,
+                )
                 return False
             return True
         except Exception as e:
-            self.log.error('Unable to push system file %s to %s due to %s',
-                           src_file_path, dst_file_path, e)
+            self.log.error(
+                "Unable to push system file %s to %s due to %s",
+                src_file_path,
+                dst_file_path,
+                e,
+            )
             return False
 
     def ensure_verity_enabled(self):
@@ -1748,8 +1793,8 @@
         """
         user = self.adb.get_user_id()
         # The below properties will only exist if verity has been enabled.
-        system_verity = self.adb.getprop('partition.system.verified')
-        vendor_verity = self.adb.getprop('partition.vendor.verified')
+        system_verity = self.adb.getprop("partition.system.verified")
+        vendor_verity = self.adb.getprop("partition.vendor.verified")
         if not system_verity or not vendor_verity:
             self.adb.ensure_root()
             self.adb.enable_verity()
@@ -1763,8 +1808,8 @@
         """
         user = self.adb.get_user_id()
         # The below properties will only exist if verity has been enabled.
-        system_verity = self.adb.getprop('partition.system.verified')
-        vendor_verity = self.adb.getprop('partition.vendor.verified')
+        system_verity = self.adb.getprop("partition.system.verified")
+        vendor_verity = self.adb.getprop("partition.vendor.verified")
         if system_verity or vendor_verity:
             self.adb.ensure_root()
             self.adb.disable_verity()
diff --git a/src/antlion/controllers/android_lib/android_api.py b/src/antlion/controllers/android_lib/android_api.py
deleted file mode 100644
index d58fe46..0000000
--- a/src/antlion/controllers/android_lib/android_api.py
+++ /dev/null
@@ -1,66 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import logging
-import sys
-
-from antlion.controllers.android_device import AndroidDevice
-from antlion.libs import version_selector
-
-
-class AndroidApi:
-    OLDEST = 0
-    MINIMUM = 0
-    L = 21
-    L_MR1 = 22
-    M = 23
-    N = 24
-    N_MR1 = 25
-    O = 26
-    O_MR1 = 27
-    P = 28
-    LATEST = sys.maxsize
-    MAX = sys.maxsize
-
-
-def android_api(min_api=AndroidApi.OLDEST, max_api=AndroidApi.LATEST):
-    """Decorates a function to only be called for the given API range.
-
-    Only gets called if the AndroidDevice in the args is within the specified
-    API range. Otherwise, a different function may be called instead. If the
-    API level is out of range, and no other function handles that API level, an
-    error is raise instead.
-
-    Note: In Python3.5 and below, the order of kwargs is not preserved. If your
-          function contains multiple AndroidDevices within the kwargs, and no
-          AndroidDevices within args, you are NOT guaranteed the first
-          AndroidDevice is the same one chosen each time the function runs. Due
-          to this, we do not check for AndroidDevices in kwargs.
-
-    Args:
-         min_api: The minimum API level. Can be an int or an AndroidApi value.
-         max_api: The maximum API level. Can be an int or an AndroidApi value.
-    """
-
-    def get_api_level(*args, **_):
-        for arg in args:
-            if isinstance(arg, AndroidDevice):
-                return arg.sdk_api_level()
-        logging.getLogger().error(
-            'An AndroidDevice was not found in the given '
-            'arguments.')
-        return None
-
-    return version_selector.set_version(get_api_level, min_api, max_api)
diff --git a/src/antlion/controllers/android_lib/logcat.py b/src/antlion/controllers/android_lib/logcat.py
index 4e2c4fd..0a5e8f7 100644
--- a/src/antlion/controllers/android_lib/logcat.py
+++ b/src/antlion/controllers/android_lib/logcat.py
@@ -21,7 +21,7 @@
 from antlion.libs.logging import log_stream
 from antlion.libs.logging.log_stream import LogStyles
 
-TIMESTAMP_REGEX = r'((?:\d+-)?\d+-\d+ \d+:\d+:\d+.\d+)'
+TIMESTAMP_REGEX = r"((?:\d+-)?\d+-\d+ \d+:\d+:\d+.\d+)"
 
 
 class TimestampTracker(object):
@@ -43,17 +43,17 @@
 
 def _get_log_level(message):
     """Returns the log level for the given message."""
-    if message.startswith('-') or len(message) < 37:
+    if message.startswith("-") or len(message) < 37:
         return logging.ERROR
     else:
         log_level = message[36]
-        if log_level in ('V', 'D'):
+        if log_level in ("V", "D"):
             return logging.DEBUG
-        elif log_level == 'I':
+        elif log_level == "I":
             return logging.INFO
-        elif log_level == 'W':
+        elif log_level == "W":
             return logging.WARNING
-        elif log_level == 'E':
+        elif log_level == "E":
             return logging.ERROR
     return logging.NOTSET
 
@@ -71,15 +71,18 @@
 def _on_retry(serial, extra_params, timestamp_tracker):
     def on_retry(_):
         begin_at = '"%s"' % (timestamp_tracker.last_timestamp or 1)
-        additional_params = extra_params or ''
+        additional_params = extra_params or ""
 
-        return 'adb -s %s logcat -T %s -v year %s' % (
-            serial, begin_at, additional_params)
+        return "adb -s %s logcat -T %s -v year %s" % (
+            serial,
+            begin_at,
+            additional_params,
+        )
 
     return on_retry
 
 
-def create_logcat_keepalive_process(serial, logcat_dir, extra_params=''):
+def create_logcat_keepalive_process(serial, logcat_dir, extra_params=""):
     """Creates a Logcat Process that automatically attempts to reconnect.
 
     Args:
@@ -91,12 +94,15 @@
         A acts.libs.proc.process.Process object.
     """
     logger = log_stream.create_logger(
-        'adblog_%s' % serial, log_name=serial, subcontext=logcat_dir,
-        log_styles=(LogStyles.LOG_DEBUG | LogStyles.TESTCASE_LOG))
-    process = Process('adb -s %s logcat -T 1 -v year %s' %
-                      (serial, extra_params))
+        "adblog_%s" % serial,
+        log_name=serial,
+        subcontext=logcat_dir,
+        log_styles=(LogStyles.LOG_DEBUG | LogStyles.TESTCASE_LOG),
+    )
+    process = Process("adb -s %s logcat -T 1 -v year %s" % (serial, extra_params))
     timestamp_tracker = TimestampTracker()
     process.set_on_output_callback(_log_line_func(logger, timestamp_tracker))
     process.set_on_terminate_callback(
-        _on_retry(serial, extra_params, timestamp_tracker))
+        _on_retry(serial, extra_params, timestamp_tracker)
+    )
     return process
diff --git a/src/antlion/controllers/android_lib/services.py b/src/antlion/controllers/android_lib/services.py
index 6c5f334..098f524 100644
--- a/src/antlion/controllers/android_lib/services.py
+++ b/src/antlion/controllers/android_lib/services.py
@@ -14,7 +14,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-
 from antlion.controllers.android_lib import errors
 from antlion.controllers.android_lib import events as android_events
 from antlion.event import event_bus
@@ -44,14 +43,22 @@
         """Registers the _start and _stop methods to their corresponding
         events.
         """
+
         def check_serial(event):
             return self.serial == event.ad.serial
 
         self._registration_ids = [
-            event_bus.register(android_events.AndroidStartServicesEvent,
-                               self._start, filter_fn=check_serial),
-            event_bus.register(android_events.AndroidStopServicesEvent,
-                               self._stop, filter_fn=check_serial)]
+            event_bus.register(
+                android_events.AndroidStartServicesEvent,
+                self._start,
+                filter_fn=check_serial,
+            ),
+            event_bus.register(
+                android_events.AndroidStopServicesEvent,
+                self._stop,
+                filter_fn=check_serial,
+            ),
+        ]
 
     def unregister(self):
         """Unregisters all subscriptions in this service."""
@@ -93,14 +100,15 @@
             return
 
         if not self.ad.is_sl4a_installed():
-            self.ad.log.error('sl4a.apk is not installed')
+            self.ad.log.error("sl4a.apk is not installed")
             raise errors.AndroidDeviceError(
-                'The required sl4a.apk is not installed',
-                serial=self.serial)
+                "The required sl4a.apk is not installed", serial=self.serial
+            )
         if not self.ad.ensure_screen_on():
             self.ad.log.error("User window cannot come up")
             raise errors.AndroidDeviceError(
-                "User window cannot come up", serial=self.serial)
+                "User window cannot come up", serial=self.serial
+            )
 
         droid, ed = self.ad.get_droid()
         ed.start()
diff --git a/src/antlion/controllers/android_lib/tel/tel_utils.py b/src/antlion/controllers/android_lib/tel/tel_utils.py
index 0be8ef5..c18741c 100644
--- a/src/antlion/controllers/android_lib/tel/tel_utils.py
+++ b/src/antlion/controllers/android_lib/tel/tel_utils.py
@@ -13,7 +13,6 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
 """Generic telephony utility functions. Cloned from test_utils.tel."""
 
 import re
@@ -89,21 +88,27 @@
 
 
 def dumpsys_last_call_info(ad):
-    """ Get call information by dumpsys telecom. """
+    """Get call information by dumpsys telecom."""
     num = dumpsys_last_call_number(ad)
     output = ad.adb.shell("dumpsys telecom")
     result = re.search(r"Call TC@%s: {(.*?)}" % num, output, re.DOTALL)
     call_info = {"TC": num}
     if result:
         result = result.group(1)
-        for attr in ("startTime", "endTime", "direction", "isInterrupted",
-                     "callTechnologies", "callTerminationsReason",
-                     "isVideoCall", "callProperties"):
+        for attr in (
+            "startTime",
+            "endTime",
+            "direction",
+            "isInterrupted",
+            "callTechnologies",
+            "callTerminationsReason",
+            "isVideoCall",
+            "callProperties",
+        ):
             match = re.search(r"%s: (.*)" % attr, result)
             if match:
                 if attr in ("startTime", "endTime"):
-                    call_info[attr] = epoch_to_log_line_timestamp(
-                        int(match.group(1)))
+                    call_info[attr] = epoch_to_log_line_timestamp(int(match.group(1)))
                 else:
                     call_info[attr] = match.group(1)
     ad.log.debug("call_info = %s", call_info)
@@ -124,8 +129,7 @@
 
 
 def get_outgoing_voice_sub_id(ad):
-    """ Get outgoing voice subscription id
-    """
+    """Get outgoing voice subscription id"""
     if hasattr(ad, "outgoing_voice_sub_id"):
         return ad.outgoing_voice_sub_id
     else:
@@ -133,7 +137,7 @@
 
 
 def get_rx_tx_power_levels(log, ad):
-    """ Obtains Rx and Tx power levels from the MDS application.
+    """Obtains Rx and Tx power levels from the MDS application.
 
     The method requires the MDS app to be installed in the DUT.
 
@@ -146,36 +150,40 @@
         in Rx chain, and the second element is the transmitted power in dBm.
         Values for invalid Rx / Tx chains are set to None.
     """
-    cmd = ('am instrument -w -e request "80 00 e8 03 00 08 00 00 00" -e '
-           'response wait "com.google.mdstest/com.google.mdstest.instrument.'
-           'ModemCommandInstrumentation"')
+    cmd = (
+        'am instrument -w -e request "80 00 e8 03 00 08 00 00 00" -e '
+        'response wait "com.google.mdstest/com.google.mdstest.instrument.'
+        'ModemCommandInstrumentation"'
+    )
     try:
         output = ad.adb.shell(cmd)
     except AdbCommandError as e:
         log.error(e)
         output = None
 
-    if not output or 'result=SUCCESS' not in output:
-        raise RuntimeError('Could not obtain Tx/Rx power levels from MDS. Is '
-                           'the MDS app installed?')
+    if not output or "result=SUCCESS" not in output:
+        raise RuntimeError(
+            "Could not obtain Tx/Rx power levels from MDS. Is " "the MDS app installed?"
+        )
 
     response = re.search(r"(?<=response=).+", output)
 
     if not response:
-        raise RuntimeError('Invalid response from the MDS app:\n' + output)
+        raise RuntimeError("Invalid response from the MDS app:\n" + output)
 
     # Obtain a list of bytes in hex format from the response string
-    response_hex = response.group(0).split(' ')
+    response_hex = response.group(0).split(" ")
 
     def get_bool(pos):
-        """ Obtain a boolean variable from the byte array. """
-        return response_hex[pos] == '01'
+        """Obtain a boolean variable from the byte array."""
+        return response_hex[pos] == "01"
 
     def get_int32(pos):
-        """ Obtain an int from the byte array. Bytes are printed in
+        """Obtain an int from the byte array. Bytes are printed in
         little endian format."""
         return struct.unpack(
-            '<i', bytearray.fromhex(''.join(response_hex[pos:pos + 4])))[0]
+            "<i", bytearray.fromhex("".join(response_hex[pos : pos + 4]))
+        )[0]
 
     rx_power = []
     RX_CHAINS = 4
@@ -204,7 +212,7 @@
 
 
 def get_telephony_signal_strength(ad):
-    #{'evdoEcio': -1, 'asuLevel': 28, 'lteSignalStrength': 14, 'gsmLevel': 0,
+    # {'evdoEcio': -1, 'asuLevel': 28, 'lteSignalStrength': 14, 'gsmLevel': 0,
     # 'cdmaAsuLevel': 99, 'evdoDbm': -120, 'gsmDbm': -1, 'cdmaEcio': -160,
     # 'level': 2, 'lteLevel': 2, 'cdmaDbm': -120, 'dbm': -112, 'cdmaLevel': 0,
     # 'lteAsuLevel': 28, 'gsmAsuLevel': 99, 'gsmBitErrorRate': 0,
@@ -219,12 +227,14 @@
     return signal_strength
 
 
-def initiate_call(log,
-                  ad,
-                  callee_number,
-                  emergency=False,
-                  incall_ui_display=INCALL_UI_DISPLAY_FOREGROUND,
-                  video=False):
+def initiate_call(
+    log,
+    ad,
+    callee_number,
+    emergency=False,
+    incall_ui_display=INCALL_UI_DISPLAY_FOREGROUND,
+    video=False,
+):
     """Make phone call from caller to callee.
 
     Args:
@@ -253,7 +263,8 @@
 
         # Verify OFFHOOK state
         if not wait_for_call_offhook_for_subscription(
-                log, ad, sub_id, event_tracking_started=True):
+            log, ad, sub_id, event_tracking_started=True
+        ):
             ad.log.info("sub_id %s not in call offhook state", sub_id)
             last_call_drop_reason(ad, begin_time=begin_time)
             return False
@@ -299,7 +310,7 @@
         False otherwise.
     """
     try:
-        value_in_event = event['data'][field]
+        value_in_event = event["data"][field]
     except KeyError:
         return False
     for value in value_list:
@@ -319,12 +330,14 @@
         return ad.droid.telecomIsInCall()
     except:
         return "mCallState=2" in ad.adb.shell(
-            "dumpsys telephony.registry | grep mCallState")
+            "dumpsys telephony.registry | grep mCallState"
+        )
 
 
 def last_call_drop_reason(ad, begin_time=None):
     reasons = ad.search_logcat(
-        "qcril_qmi_voice_map_qmi_to_ril_last_call_failure_cause", begin_time)
+        "qcril_qmi_voice_map_qmi_to_ril_last_call_failure_cause", begin_time
+    )
     reason_string = ""
     if reasons:
         log_msg = "Logcat call drop reasons:"
@@ -333,17 +346,15 @@
             if "ril reason str" in reason["log_message"]:
                 reason_string = reason["log_message"].split(":")[-1].strip()
         ad.log.info(log_msg)
-    reasons = ad.search_logcat("ACTION_FORBIDDEN_NO_SERVICE_AUTHORIZATION",
-                               begin_time)
+    reasons = ad.search_logcat("ACTION_FORBIDDEN_NO_SERVICE_AUTHORIZATION", begin_time)
     if reasons:
         ad.log.warning("ACTION_FORBIDDEN_NO_SERVICE_AUTHORIZATION is seen")
-    ad.log.info("last call dumpsys: %s",
-                sorted(dumpsys_last_call_info(ad).items()))
+    ad.log.info("last call dumpsys: %s", sorted(dumpsys_last_call_info(ad).items()))
     return reason_string
 
 
 def toggle_airplane_mode(log, ad, new_state=None, strict_checking=True):
-    """ Toggle the state of airplane mode.
+    """Toggle the state of airplane mode.
 
     Args:
         log: log handler.
@@ -359,11 +370,12 @@
         return toggle_airplane_mode_by_adb(log, ad, new_state)
     else:
         return toggle_airplane_mode_msim(
-            log, ad, new_state, strict_checking=strict_checking)
+            log, ad, new_state, strict_checking=strict_checking
+        )
 
 
 def toggle_airplane_mode_by_adb(log, ad, new_state=None):
-    """ Toggle the state of airplane mode.
+    """Toggle the state of airplane mode.
 
     Args:
         log: log handler.
@@ -392,7 +404,7 @@
 
 
 def toggle_airplane_mode_msim(log, ad, new_state=None, strict_checking=True):
-    """ Toggle the state of airplane mode.
+    """Toggle the state of airplane mode.
 
     Args:
         log: log handler.
@@ -411,13 +423,12 @@
         return True
     elif new_state is None:
         new_state = not cur_state
-        ad.log.info("Toggle APM mode, from current tate %s to %s", cur_state,
-                    new_state)
+        ad.log.info("Toggle APM mode, from current tate %s to %s", cur_state, new_state)
     sub_id_list = []
     active_sub_info = ad.droid.subscriptionGetAllSubInfoList()
     if active_sub_info:
         for info in active_sub_info:
-            sub_id_list.append(info['subscriptionId'])
+            sub_id_list.append(info["subscriptionId"])
 
     ad.ed.clear_all_events()
     time.sleep(0.1)
@@ -436,8 +447,7 @@
         ad.log.info("Turn off airplane mode")
 
     for sub_id in sub_id_list:
-        ad.droid.telephonyStartTrackingServiceStateChangeForSubscription(
-            sub_id)
+        ad.droid.telephonyStartTrackingServiceStateChangeForSubscription(sub_id)
 
     timeout_time = time.time() + MAX_WAIT_TIME_AIRPLANEMODE_EVENT
     ad.droid.connectivityToggleAirplaneMode(new_state)
@@ -449,35 +459,39 @@
                 is_event_match_for_list,
                 timeout=MAX_WAIT_TIME_AIRPLANEMODE_EVENT,
                 field=ServiceStateContainer.SERVICE_STATE,
-                value_list=service_state_list)
+                value_list=service_state_list,
+            )
             ad.log.info("Got event %s", event)
         except Empty:
-            ad.log.warning("Did not get expected service state change to %s",
-                           service_state_list)
+            ad.log.warning(
+                "Did not get expected service state change to %s", service_state_list
+            )
         finally:
             for sub_id in sub_id_list:
-                ad.droid.telephonyStopTrackingServiceStateChangeForSubscription(
-                    sub_id)
+                ad.droid.telephonyStopTrackingServiceStateChangeForSubscription(sub_id)
     except Exception as e:
         ad.log.error(e)
 
     # APM on (new_state=True) will turn off bluetooth but may not turn it on
     try:
         if new_state and not _wait_for_bluetooth_in_state(
-                log, ad, False, timeout_time - time.time()):
-            ad.log.error(
-                "Failed waiting for bluetooth during airplane mode toggle")
-            if strict_checking: return False
+            log, ad, False, timeout_time - time.time()
+        ):
+            ad.log.error("Failed waiting for bluetooth during airplane mode toggle")
+            if strict_checking:
+                return False
     except Exception as e:
         ad.log.error("Failed to check bluetooth state due to %s", e)
         if strict_checking:
             raise
 
     # APM on (new_state=True) will turn off wifi but may not turn it on
-    if new_state and not _wait_for_wifi_in_state(log, ad, False,
-                                                 timeout_time - time.time()):
+    if new_state and not _wait_for_wifi_in_state(
+        log, ad, False, timeout_time - time.time()
+    ):
         ad.log.error("Failed waiting for wifi during airplane mode toggle on")
-        if strict_checking: return False
+        if strict_checking:
+            return False
 
     if ad.droid.connectivityCheckAirplaneMode() != new_state:
         ad.log.error("Set airplane mode to %s failed", new_state)
@@ -516,11 +530,12 @@
 
 
 def wait_for_call_offhook_event(
-        log,
-        ad,
-        sub_id,
-        event_tracking_started=False,
-        timeout=MAX_WAIT_TIME_ACCEPT_CALL_TO_OFFHOOK_EVENT):
+    log,
+    ad,
+    sub_id,
+    event_tracking_started=False,
+    timeout=MAX_WAIT_TIME_ACCEPT_CALL_TO_OFFHOOK_EVENT,
+):
     """Wait for an incoming call on specified subscription.
 
     Args:
@@ -542,25 +557,26 @@
             is_event_match,
             timeout=timeout,
             field=CallStateContainer.CALL_STATE,
-            value=TELEPHONY_STATE_OFFHOOK)
+            value=TELEPHONY_STATE_OFFHOOK,
+        )
         ad.log.info("Got event %s", TELEPHONY_STATE_OFFHOOK)
     except Empty:
         ad.log.info("No event for call state change to OFFHOOK")
         return False
     finally:
         if not event_tracking_started:
-            ad.droid.telephonyStopTrackingCallStateChangeForSubscription(
-                sub_id)
+            ad.droid.telephonyStopTrackingCallStateChangeForSubscription(sub_id)
     return True
 
 
 def wait_for_call_offhook_for_subscription(
-        log,
-        ad,
-        sub_id,
-        event_tracking_started=False,
-        timeout=MAX_WAIT_TIME_ACCEPT_CALL_TO_OFFHOOK_EVENT,
-        interval=WAIT_TIME_BETWEEN_STATE_CHECK):
+    log,
+    ad,
+    sub_id,
+    event_tracking_started=False,
+    timeout=MAX_WAIT_TIME_ACCEPT_CALL_TO_OFFHOOK_EVENT,
+    interval=WAIT_TIME_BETWEEN_STATE_CHECK,
+):
     """Wait for an incoming call on specified subscription.
 
     Args:
@@ -582,32 +598,32 @@
     try:
         while time.time() < end_time:
             if not offhook_event_received:
-                if wait_for_call_offhook_event(log, ad, sub_id, True,
-                                               interval):
+                if wait_for_call_offhook_event(log, ad, sub_id, True, interval):
                     offhook_event_received = True
-            telephony_state = ad.droid.telephonyGetCallStateForSubscription(
-                sub_id)
+            telephony_state = ad.droid.telephonyGetCallStateForSubscription(sub_id)
             telecom_state = ad.droid.telecomGetCallState()
             if telephony_state == TELEPHONY_STATE_OFFHOOK and (
-                    telecom_state == TELEPHONY_STATE_OFFHOOK):
+                telecom_state == TELEPHONY_STATE_OFFHOOK
+            ):
                 ad.log.info("telephony and telecom are in OFFHOOK state")
                 return True
             else:
                 ad.log.info(
                     "telephony in %s, telecom in %s, expecting OFFHOOK state",
-                    telephony_state, telecom_state)
+                    telephony_state,
+                    telecom_state,
+                )
             if offhook_event_received:
                 time.sleep(interval)
     finally:
         if not event_tracking_started:
-            ad.droid.telephonyStopTrackingCallStateChangeForSubscription(
-                sub_id)
+            ad.droid.telephonyStopTrackingCallStateChangeForSubscription(sub_id)
 
 
 def _wait_for_bluetooth_in_state(log, ad, state, max_wait):
     # FIXME: These event names should be defined in a common location
-    _BLUETOOTH_STATE_ON_EVENT = 'BluetoothStateChangedOn'
-    _BLUETOOTH_STATE_OFF_EVENT = 'BluetoothStateChangedOff'
+    _BLUETOOTH_STATE_ON_EVENT = "BluetoothStateChangedOn"
+    _BLUETOOTH_STATE_OFF_EVENT = "BluetoothStateChangedOff"
     ad.ed.clear_events(_BLUETOOTH_STATE_ON_EVENT)
     ad.ed.clear_events(_BLUETOOTH_STATE_OFF_EVENT)
 
@@ -617,20 +633,21 @@
         if bt_state == state:
             return True
         if max_wait <= 0:
-            ad.log.error("Time out: bluetooth state still %s, expecting %s",
-                         bt_state, state)
+            ad.log.error(
+                "Time out: bluetooth state still %s, expecting %s", bt_state, state
+            )
             return False
 
-        event = {
-            False: _BLUETOOTH_STATE_OFF_EVENT,
-            True: _BLUETOOTH_STATE_ON_EVENT
-        }[state]
+        event = {False: _BLUETOOTH_STATE_OFF_EVENT, True: _BLUETOOTH_STATE_ON_EVENT}[
+            state
+        ]
         event = ad.ed.pop_event(event, max_wait)
-        ad.log.info("Got event %s", event['name'])
+        ad.log.info("Got event %s", event["name"])
         return True
     except Empty:
-        ad.log.error("Time out: bluetooth state still in %s, expecting %s",
-                     bt_state, state)
+        ad.log.error(
+            "Time out: bluetooth state still in %s, expecting %s", bt_state, state
+        )
         return False
     finally:
         ad.droid.bluetoothStopListeningForAdapterStateChange()
@@ -651,8 +668,7 @@
     return _wait_for_droid_in_state(log, ad, max_time, is_phone_in_call)
 
 
-def _wait_for_droid_in_state(log, ad, max_time, state_check_func, *args,
-                             **kwargs):
+def _wait_for_droid_in_state(log, ad, max_time, state_check_func, *args, **kwargs):
     while max_time >= 0:
         if state_check_func(log, ad, *args, **kwargs):
             return True
@@ -665,7 +681,10 @@
 
 # TODO: replace this with an event-based function
 def _wait_for_wifi_in_state(log, ad, state, max_wait):
-    return _wait_for_droid_in_state(log, ad, max_wait,
-        lambda log, ad, state: \
-                (True if ad.droid.wifiCheckState() == state else False),
-                state)
+    return _wait_for_droid_in_state(
+        log,
+        ad,
+        max_wait,
+        lambda log, ad, state: (True if ad.droid.wifiCheckState() == state else False),
+        state,
+    )
diff --git a/src/antlion/controllers/anritsu_lib/OWNERS b/src/antlion/controllers/anritsu_lib/OWNERS
deleted file mode 100644
index e4010df..0000000
--- a/src/antlion/controllers/anritsu_lib/OWNERS
+++ /dev/null
@@ -1,4 +0,0 @@
-iguarna@google.com
-chaoyangf@google.com
-yixiang@google.com
-codycaldwell@google.com
\ No newline at end of file
diff --git a/src/antlion/controllers/anritsu_lib/__init__.py b/src/antlion/controllers/anritsu_lib/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/controllers/anritsu_lib/__init__.py
+++ /dev/null
diff --git a/src/antlion/controllers/anritsu_lib/_anritsu_utils.py b/src/antlion/controllers/anritsu_lib/_anritsu_utils.py
deleted file mode 100644
index ea5736f..0000000
--- a/src/antlion/controllers/anritsu_lib/_anritsu_utils.py
+++ /dev/null
@@ -1,233 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Utility functions for for Anritsu Signalling Tester.
-"""
-# yapf: disable
-
-OPERATION_COMPLETE = 1
-NO_ERROR = 0
-
-ANRITSU_ERROR_CODES = {
-    0: 'No errors occurred',
-    2: 'The specified file does not exist',
-    14: 'The buffer size is insufficient',
-    29: 'The save destination is a write-protected file.',
-    80: 'A file with the same name already exists.'
-        ' (If Overwrite is specified to 0.)',
-    87: 'The specified value is wrong.',
-    112: 'The disk space is insufficient.',
-    183: 'SmartStudio is already running.',
-    1060: 'The control software has not been started or has already terminated',
-    1067: 'SmartStudio, control software or SMS Centre could not start due to'
-          'a problem or problems resulting from OS or the MD8475A system.',
-    1229: 'Connecting to the server failed.',
-    1235: 'A request is suspended.',
-    1460: 'The operation is terminated due to the expiration of the'
-          ' timeout period.',
-    9999: 'A GPIB command error occurred.',
-    536870912: 'The license could not be confirmed.',
-    536870913: 'The specified file cannot be loaded by the SmartStudio.',
-    536870914: 'The specified process ID does not exist.',
-    536870915: 'The received data does not exist.',
-    536870916: 'Simulation is not running.',
-    536870917: 'Simulation is running.',
-    536870918: 'Test Case has never been executed.',
-    536870919: 'The resource cannot be obtained.',
-    536870920: 'A resource protocol error, such as download error or'
-               ' license error, occurred.',
-    536870921: 'The function call has been in invalid status.',
-    536870922: 'The current Simulation Model does not allow the operation.',
-    536870923: 'The Cell name to be set does not exist.',
-    536870924: 'The test is being executed.',
-    536870925: 'The current UE status does not correspond to the'
-               ' test parameters.',
-    536870926: 'There is no LOG information because the simulation'
-               ' has not been executed.',
-    536870927: 'Measure Export has already been executed.',
-    536870928: 'SmartStudio is not connected to the SMS Centre.',
-    536870929: 'SmartStudio failed to send an SMS message to the SMS Centre.',
-    536870930: 'SmartStudio has successfully sent an SMS message'
-               ' to the SMS Centre,but the SMS Centre judges it as an error.',
-    536870931: 'The processing that is unavailable with the current system'
-               ' status has been executed.',
-    536870932: 'The option could not be confirmed.',
-    536870933: 'Measure Export has been stopped.',
-    536870934: 'SmartStudio cannot load the specified file because the'
-               ' version is old.',
-    536870935: 'The data with the specified PDN number does not exist.',
-    536870936: 'The data with the specified Dedicated number does not exist.',
-    536870937: 'The PDN data cannot be added because the upper limit of the'
-               ' number of PDN data has been reached.',
-    536870938: 'The number of antennas, which cannot be set to the current'
-               ' Simulation Model,has been specified.',
-    536870939: 'Calibration of path loss failed.',
-    536870940: 'There is a parameter conflict.',
-    536870941: 'The DL Ref Power setting is out of the setting range'
-               ' at W-CDMA (Evolution).',
-    536870942: 'DC-HSDPA is not available for the current channel setting.',
-    536870943: 'The specified Packet Rate cannot be used by the current'
-               ' Simulation Model.',
-    536870944: 'The W-CDMA Cell parameter F-DPCH is set to Enable.',
-    536870945: 'Target is invalid.',
-    536870946: 'The PWS Centre detects an error.',
-    536870947: 'The Ec/Ior setting is invalid.',
-    536870948: 'The combination of Attach Type and TA Update Type is invalid.',
-    536870949: 'The license of the option has expired.',
-    536870950: 'The Ping command is being executed.',
-    536870951: 'The Ping command is not being executed.',
-    536870952: 'The current Test Case parameter setting is wrong.',
-    536870953: 'The specified IP address is the same as that of Default Gateway'
-               'specified by Simulation parameter.',
-    536870954: 'TFT IE conversion failed.',
-    536870955: 'Saving settings to the SmartStudio scenario failed.',
-    536875008: 'An error exists in the parameter configuration.'
-               '(This error applies only to the current version.)',
-    536936448: 'License verification failed.',
-    536936449: 'The IMS Services cannot load the specified file.',
-    536936462: 'Simulation is not performed and no log information exists.',
-    536936467: 'The executed process is inoperable in the current status'
-               ' of Visual User Agent.',
-    536936707: 'The specified Virtual Network is not running.',
-    536936709: 'The specified Virtual Network is running. '
-               'Any one of the Virtual Networks is running.',
-    536936727: 'The specified Virtual Network does not exist.',
-    536936729: 'When the Virtual Network already exists.',
-    554762241: 'The RF Measurement launcher cannot be accessed.',
-    554762242: 'License check of the RF Measurement failed.',
-    554762243: 'Function is called when RF Measurement cannot be set.',
-    554762244: 'RF Measurement has been already started.',
-    554762245: 'RF Measurement failed to start due to a problem resulting'
-               ' from OS or the MD8475A system.',
-    554762246: 'RF Measurement is not started or is already terminated.',
-    554762247: 'There is a version mismatch between RF Measurement and CAL.',
-    554827777: 'The specified value for RF Measurement is abnormal.',
-    554827778: 'GPIB command error has occurred in RF Measurement.',
-    554827779: 'Invalid file path was specified to RF Measurement.',
-    554827780: 'RF Measurement argument is NULL pointer.',
-    555810817: 'RF Measurement is now performing the measurement.',
-    555810818: 'RF Measurement is now not performing the measurement.',
-    555810819: 'RF Measurement is not measured yet. (There is no result '
-               'information since measurement is not performed.)',
-    555810820: 'An error has occurred when RF Measurement'
-               ' starts the measurement.',
-    555810821: 'Simulation has stopped when RF Measurement is '
-               'performing the measurement.',
-    555810822: 'An error has been retrieved from the Platform when '
-               'RF Measurement is performing the measurement.',
-    555810823: 'Measurement has been started in the system state where RF '
-               'Measurement is invalid.',
-    556859393: 'RF Measurement is now saving a file.',
-    556859394: 'There is insufficient disk space when saving'
-               'a Measure Result file of RF Measurement.',
-    556859395: 'An internal error has occurred or USB cable has been'
-               ' disconnected when saving a Measure Result'
-               ' file of RF Measurement.',
-    556859396: 'A write-protected file was specified as the save destination'
-               ' when saving a Measure Result file of RF Measurement.',
-    568328193: 'An internal error has occurred in RF Measurement.',
-    687865857: 'Calibration Measure DSP is now being measured.',
-    687865858: 'Calibration measurement failed.',
-    687865859: 'Calibration slot is empty or its system does not apply.',
-    687865860: 'Unexpected command is received from Calibration HWC.',
-    687865861: 'Failed to receive the Calibration measurement result.',
-    687865862: 'Failed to open the correction value file on the'
-               ' Calibration HDD.',
-    687865863: 'Failed to move the pointer on the Calibration correction'
-               ' value table.',
-    687865864: 'Failed to write the correction value to the Calibration'
-               ' correction value file on the Calibration HDD.',
-    687865865: 'Failed to load the correction value from the Calibration HDD.',
-    687865866: 'Failed to create a directory to which the correction value '
-               'file on the Calibration HDD is saved.',
-    687865867: 'Correction data has not been written in the'
-               ' Calibration-specified correction table.',
-    687865868: 'Data received from Calibration HWC does not exist.',
-    687865869: 'Data has not been written to the Flash ROM'
-               ' of Calibration BASE UNIT.',
-    687865870: 'Correction data has not been written to the'
-               ' Calibration-specified sector.',
-    687866111: 'An calibration error other than described above occurred.',
-}
-
-
-def _error_code_tostring(error_code):
-    ''' returns the description of the error from the error code
-    returned by anritsu MD8475A '''
-    try:
-        error_string = ANRITSU_ERROR_CODES[error_code]
-    except KeyError:
-        error_string = "Error : {} ".format(error_code)
-
-    return error_string
-
-
-class AnritsuUtils(object):
-    def gsm_encode(text):
-        '''To encode text string with GSM 7-bit alphabet for common symbols'''
-        table = {' ': '%20', '!': '%21', '\"': '%22', '#': '%23', '$': '%24',
-                 '/': '%2F', '%': '%25', '&': '%26', '\'': '%27', '(': '%28',
-                 ')': '%29', '*': '%2A', '+': '%2B', ',': '%2C', ':': '%3A',
-                 ';': '%3B', '<': '%3C', '=': '%3D', '>': '%3E', '?': '%3F',
-                 '@': '%40', '[': '%5B', ']': '%5D', '_': '%5F', 'é': '%C3%A9'}
-        coded_str = ""
-        for char in text:
-            if char in table:
-                coded_str += table[char]
-            else:
-                coded_str += char
-        return coded_str
-
-    def gsm_decode(text):
-        '''To decode text string with GSM 7-bit alphabet for common symbols'''
-        table = {'%20': ' ', '%21': '!', '%22': '\"', '%23': '#', '%24': '$',
-                 '%2F': '/', '%25': '%', '%26': '&', '%27': '\'', '%28': '(',
-                 '%29': ')', '%2A': '*', '%2B': '+', '%2C': ',', '%3A': ':',
-                 '%3B': ';', '%3C': '<', '%3D': '=', '%3E': '>', '%3F': '?',
-                 '%40': '@', '%5B': '[', '%5D': ']', '%5F': '_', '%C3%A9': 'é'}
-        coded_str = text
-        for char in table:
-            if char in text:
-                coded_str = coded_str.replace(char, table[char])
-        return coded_str
-
-    def cdma_encode(text):
-        '''To encode text string with GSM 7-bit alphabet for common symbols'''
-        table = {' ': '%20', '!': '%21', '\"': '%22', '#': '%23', '$': '%24',
-                 '/': '%2F', '%': '%25', '&': '%26', '\'': '%27', '(': '%28',
-                 ')': '%29', '*': '%2A', '+': '%2B', ',': '%2C', ':': '%3A',
-                 ';': '%3B', '<': '%3C', '=': '%3D', '>': '%3E', '?': '%3F',
-                 '@': '%40', '[': '%5B', ']': '%5D', '_': '%5F'}
-        coded_str = ""
-        for char in text:
-            if char in table:
-                coded_str += table[char]
-            else:
-                coded_str += char
-        return coded_str
-
-class AnritsuError(Exception):
-    '''Exception for errors related to Anritsu.'''
-    def __init__(self, error, command=None):
-        self._error_code = error
-        self._error_message = _error_code_tostring(self._error_code)
-        if command is not None:
-            self._error_message = "Command {} returned the error: '{}'".format(
-                                  command, self._error_message)
-
-    def __str__(self):
-        return self._error_message
-# yapf: enable
diff --git a/src/antlion/controllers/anritsu_lib/band_constants.py b/src/antlion/controllers/anritsu_lib/band_constants.py
deleted file mode 100644
index 18dd5bc..0000000
--- a/src/antlion/controllers/anritsu_lib/band_constants.py
+++ /dev/null
@@ -1,31 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# GSM BAND constants
-GSM_BAND_GSM450 = "GSM450"
-GSM_BAND_GSM480 = "GSM480"
-GSM_BAND_GSM850 = "GSM850"
-GSM_BAND_PGSM900 = "P-GSM900"
-GSM_BAND_EGSM900 = "E-GSM900"
-GSM_BAND_RGSM900 = "R-GSM900"
-GSM_BAND_DCS1800 = "DCS1800"
-GSM_BAND_PCS1900 = "PCS1900"
-
-LTE_BAND_2 = 2
-LTE_BAND_4 = 4
-LTE_BAND_12 = 12
-WCDMA_BAND_1 = 1
-WCDMA_BAND_2 = 2
diff --git a/src/antlion/controllers/anritsu_lib/cell_configurations.py b/src/antlion/controllers/anritsu_lib/cell_configurations.py
deleted file mode 100644
index 83773e0..0000000
--- a/src/antlion/controllers/anritsu_lib/cell_configurations.py
+++ /dev/null
@@ -1,327 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Sanity tests for voice tests in telephony
-"""
-from antlion.controllers.anritsu_lib.band_constants import GSM_BAND_PCS1900
-from antlion.controllers.anritsu_lib.band_constants import GSM_BAND_GSM850
-from antlion.controllers.anritsu_lib.band_constants import LTE_BAND_2
-from antlion.controllers.anritsu_lib.band_constants import LTE_BAND_4
-from antlion.controllers.anritsu_lib.band_constants import LTE_BAND_12
-from antlion.controllers.anritsu_lib.band_constants import WCDMA_BAND_1
-from antlion.controllers.anritsu_lib.band_constants import WCDMA_BAND_2
-from antlion.controllers.anritsu_lib.md8475a import BtsBandwidth
-
-# Different Cell configurations
-# TMO bands
-lte_band4_ch2000_fr2115_pcid1_cell = {
-    'band': LTE_BAND_4,
-    'bandwidth': BtsBandwidth.LTE_BANDWIDTH_10MHz,
-    'mcc': '001',
-    'mnc': '01',
-    'tac': 11,
-    'cid': 1,
-    'pcid': 1,
-    'channel': 2000
-}
-
-lte_band4_ch2000_fr2115_pcid2_cell = {
-    'band': LTE_BAND_4,
-    'bandwidth': BtsBandwidth.LTE_BANDWIDTH_10MHz,
-    'mcc': '001',
-    'mnc': '01',
-    'tac': 12,
-    'cid': 2,
-    'pcid': 2,
-    'channel': 2000
-}
-
-lte_band4_ch2000_fr2115_pcid3_cell = {
-    'band': LTE_BAND_4,
-    'bandwidth': BtsBandwidth.LTE_BANDWIDTH_10MHz,
-    'mcc': '001',
-    'mnc': '01',
-    'tac': 13,
-    'cid': 3,
-    'pcid': 3,
-    'channel': 2000
-}
-
-lte_band4_ch2000_fr2115_pcid4_cell = {
-    'band': LTE_BAND_4,
-    'bandwidth': BtsBandwidth.LTE_BANDWIDTH_10MHz,
-    'mcc': '001',
-    'mnc': '01',
-    'tac': 14,
-    'cid': 4,
-    'pcid': 4,
-    'channel': 2000
-}
-
-lte_band4_ch2000_fr2115_pcid5_cell = {
-    'band': LTE_BAND_4,
-    'bandwidth': BtsBandwidth.LTE_BANDWIDTH_10MHz,
-    'mcc': '001',
-    'mnc': '01',
-    'tac': 15,
-    'cid': 5,
-    'pcid': 5,
-    'channel': 2000
-}
-
-lte_band4_ch2000_fr2115_pcid6_cell = {
-    'band': LTE_BAND_4,
-    'bandwidth': BtsBandwidth.LTE_BANDWIDTH_10MHz,
-    'mcc': '001',
-    'mnc': '01',
-    'tac': 16,
-    'cid': 6,
-    'pcid': 6,
-    'channel': 2000
-}
-
-lte_band4_ch2050_fr2120_pcid7_cell = {
-    'band': LTE_BAND_4,
-    'bandwidth': BtsBandwidth.LTE_BANDWIDTH_10MHz,
-    'mcc': '001',
-    'mnc': '01',
-    'tac': 17,
-    'cid': 7,
-    'pcid': 7,
-    'channel': 2050
-}
-
-lte_band4_ch2250_fr2140_pcid8_cell = {
-    'band': LTE_BAND_4,
-    'bandwidth': BtsBandwidth.LTE_BANDWIDTH_10MHz,
-    'mcc': '001',
-    'mnc': '01',
-    'tac': 18,
-    'cid': 8,
-    'pcid': 8,
-    'channel': 2250
-}
-
-lte_band2_ch900_fr1960_pcid9_cell = {
-    'band': LTE_BAND_2,
-    'bandwidth': BtsBandwidth.LTE_BANDWIDTH_10MHz,
-    'mcc': '001',
-    'mnc': '01',
-    'tac': 19,
-    'cid': 9,
-    'pcid': 9,
-    'channel': 900
-}
-
-lte_band12_ch5095_fr737_pcid10_cell = {
-    'band': LTE_BAND_12,
-    'bandwidth': BtsBandwidth.LTE_BANDWIDTH_10MHz,
-    'mcc': '001',
-    'mnc': '01',
-    'tac': 20,
-    'cid': 10,
-    'pcid': 10,
-    'channel': 5095
-}
-
-wcdma_band1_ch10700_fr2140_cid31_cell = {
-    'band': WCDMA_BAND_1,
-    'mcc': '001',
-    'mnc': '01',
-    'lac': 31,
-    'rac': 31,
-    'cid': 31,
-    'channel': 10700,
-    'psc': 31
-}
-
-wcdma_band1_ch10700_fr2140_cid32_cell = {
-    'band': WCDMA_BAND_1,
-    'mcc': '001',
-    'mnc': '01',
-    'lac': 32,
-    'rac': 32,
-    'cid': 32,
-    'channel': 10700,
-    'psc': 32
-}
-
-wcdma_band1_ch10700_fr2140_cid33_cell = {
-    'band': WCDMA_BAND_1,
-    'mcc': '001',
-    'mnc': '01',
-    'lac': 33,
-    'rac': 33,
-    'cid': 33,
-    'channel': 10700,
-    'psc': 33
-}
-
-wcdma_band1_ch10700_fr2140_cid34_cell = {
-    'band': WCDMA_BAND_1,
-    'mcc': '001',
-    'mnc': '01',
-    'lac': 34,
-    'rac': 34,
-    'cid': 34,
-    'channel': 10700,
-    'psc': 34
-}
-
-wcdma_band1_ch10700_fr2140_cid35_cell = {
-    'band': WCDMA_BAND_1,
-    'mcc': '001',
-    'mnc': '01',
-    'lac': 35,
-    'rac': 35,
-    'cid': 35,
-    'channel': 10700,
-    'psc': 35
-}
-
-wcdma_band1_ch10575_fr2115_cid36_cell = {
-    'band': WCDMA_BAND_1,
-    'mcc': '001',
-    'mnc': '01',
-    'lac': 36,
-    'rac': 36,
-    'cid': 36,
-    'channel': 10575,
-    'psc': 36
-}
-
-wcdma_band1_ch10800_fr2160_cid37_cell = {
-    'band': WCDMA_BAND_1,
-    'mcc': '001',
-    'mnc': '01',
-    'lac': 37,
-    'rac': 37,
-    'cid': 37,
-    'channel': 10800,
-    'psc': 37
-}
-
-wcdma_band2_ch9800_fr1960_cid38_cell = {
-    'band': WCDMA_BAND_2,
-    'mcc': '001',
-    'mnc': '01',
-    'lac': 38,
-    'rac': 38,
-    'cid': 38,
-    'channel': 9800,
-    'psc': 38
-}
-
-wcdma_band2_ch9900_fr1980_cid39_cell = {
-    'band': WCDMA_BAND_2,
-    'mcc': '001',
-    'mnc': '01',
-    'lac': 39,
-    'rac': 39,
-    'cid': 39,
-    'channel': 9900,
-    'psc': 39
-}
-
-gsm_band1900_ch512_fr1930_cid51_cell = {
-    'band': GSM_BAND_PCS1900,
-    'mcc': '001',
-    'mnc': '01',
-    'lac': 51,
-    'rac': 51,
-    'cid': 51,
-    'channel': 512,
-}
-
-gsm_band1900_ch512_fr1930_cid52_cell = {
-    'band': GSM_BAND_PCS1900,
-    'mcc': '001',
-    'mnc': '01',
-    'lac': 52,
-    'rac': 52,
-    'cid': 52,
-    'channel': 512,
-}
-
-gsm_band1900_ch512_fr1930_cid53_cell = {
-    'band': GSM_BAND_PCS1900,
-    'mcc': '001',
-    'mnc': '01',
-    'lac': 53,
-    'rac': 53,
-    'cid': 53,
-    'channel': 512,
-}
-
-gsm_band1900_ch512_fr1930_cid54_cell = {
-    'band': GSM_BAND_PCS1900,
-    'mcc': '001',
-    'mnc': '01',
-    'lac': 54,
-    'rac': 54,
-    'cid': 54,
-    'channel': 512,
-}
-
-gsm_band1900_ch512_fr1930_cid55_cell = {
-    'band': GSM_BAND_PCS1900,
-    'mcc': '001',
-    'mnc': '01',
-    'lac': 55,
-    'rac': 55,
-    'cid': 55,
-    'channel': 512,
-}
-
-gsm_band1900_ch640_fr1955_cid56_cell = {
-    'band': GSM_BAND_PCS1900,
-    'mcc': '001',
-    'mnc': '01',
-    'lac': 56,
-    'rac': 56,
-    'cid': 56,
-    'channel': 640,
-}
-
-gsm_band1900_ch750_fr1977_cid57_cell = {
-    'band': GSM_BAND_PCS1900,
-    'mcc': '001',
-    'mnc': '01',
-    'lac': 57,
-    'rac': 57,
-    'cid': 57,
-    'channel': 750,
-}
-
-gsm_band850_ch128_fr869_cid58_cell = {
-    'band': GSM_BAND_GSM850,
-    'mcc': '001',
-    'mnc': '01',
-    'lac': 58,
-    'rac': 58,
-    'cid': 58,
-    'channel': 128,
-}
-
-gsm_band850_ch251_fr893_cid59_cell = {
-    'band': GSM_BAND_GSM850,
-    'mcc': '001',
-    'mnc': '01',
-    'lac': 59,
-    'rac': 59,
-    'cid': 59,
-    'channel': 251,
-}
diff --git a/src/antlion/controllers/anritsu_lib/md8475_cellular_simulator.py b/src/antlion/controllers/anritsu_lib/md8475_cellular_simulator.py
deleted file mode 100644
index 55a89e9..0000000
--- a/src/antlion/controllers/anritsu_lib/md8475_cellular_simulator.py
+++ /dev/null
@@ -1,732 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import math
-import ntpath
-import time
-import antlion.controllers.cellular_simulator as cc
-from antlion.controllers.cellular_lib import LteSimulation
-from antlion.controllers.anritsu_lib import md8475a
-from antlion.controllers.anritsu_lib import _anritsu_utils as anritsu
-
-
-class MD8475CellularSimulator(cc.AbstractCellularSimulator):
-
-    MD8475_VERSION = 'A'
-
-    # Indicates if it is able to use 256 QAM as the downlink modulation for LTE
-    LTE_SUPPORTS_DL_256QAM = False
-
-    # Indicates if it is able to use 64 QAM as the uplink modulation for LTE
-    LTE_SUPPORTS_UL_64QAM = False
-
-    # Indicates if 4x4 MIMO is supported for LTE
-    LTE_SUPPORTS_4X4_MIMO = False
-
-    # The maximum number of carriers that this simulator can support for LTE
-    LTE_MAX_CARRIERS = 2
-
-    # The maximum power that the equipment is able to transmit
-    MAX_DL_POWER = -10
-
-    # Simulation config files in the callbox computer.
-    # These should be replaced in the future by setting up
-    # the same configuration manually.
-    LTE_BASIC_SIM_FILE = 'SIM_default_LTE.wnssp'
-    LTE_BASIC_CELL_FILE = 'CELL_LTE_config.wnscp'
-    LTE_CA_BASIC_SIM_FILE = 'SIM_LTE_CA.wnssp'
-    LTE_CA_BASIC_CELL_FILE = 'CELL_LTE_CA_config.wnscp'
-
-    # Filepath to the config files stored in the Anritsu callbox. Needs to be
-    # formatted to replace {} with either A or B depending on the model.
-    CALLBOX_CONFIG_PATH = 'C:\\Users\\MD8475A\\Documents\\DAN_configs\\'
-
-    def __init__(self, ip_address):
-        """ Initializes the cellular simulator.
-
-        Args:
-            ip_address: the ip address of the MD8475 instrument
-        """
-        super().__init__()
-
-        try:
-            self.anritsu = md8475a.MD8475A(ip_address,
-                                           md8475_version=self.MD8475_VERSION)
-        except anritsu.AnritsuError:
-            raise cc.CellularSimulatorError('Could not connect to MD8475.')
-
-        self.bts = None
-
-    def destroy(self):
-        """ Sends finalization commands to the cellular equipment and closes
-        the connection. """
-        self.anritsu.stop_simulation()
-        self.anritsu.disconnect()
-
-    def setup_lte_scenario(self):
-        """ Configures the equipment for an LTE simulation. """
-        cell_file_name = self.LTE_BASIC_CELL_FILE
-        sim_file_name = self.LTE_BASIC_SIM_FILE
-
-        cell_file_path = ntpath.join(self.CALLBOX_CONFIG_PATH, cell_file_name)
-        sim_file_path = ntpath.join(self.CALLBOX_CONFIG_PATH, sim_file_name)
-
-        self.anritsu.load_simulation_paramfile(sim_file_path)
-        self.anritsu.load_cell_paramfile(cell_file_path)
-
-        # MD4875A supports only 2 carriers. The MD4875B class adds other cells.
-        self.bts = [
-            self.anritsu.get_BTS(md8475a.BtsNumber.BTS1),
-            self.anritsu.get_BTS(md8475a.BtsNumber.BTS2)
-        ]
-
-    def set_band_combination(self, bands):
-        """ Prepares the test equipment for the indicated band combination.
-
-        The reason why this is implemented in a separate method and not calling
-        LteSimulation.BtsConfig for each separate band is that configuring each
-        ssc cannot be done separately, as it is necessary to know which
-        carriers are on the same band in order to decide which RF outputs can
-        be shared in the test equipment.
-
-        Args:
-            bands: a list of bands represented as ints or strings
-        """
-        self.num_carriers = len(bands)
-
-        # Validate the number of carriers.
-        if self.num_carriers > self.LTE_MAX_CARRIERS:
-            raise cc.CellularSimulatorError('The test equipment supports up '
-                                            'to {} carriers.'.format(
-                                                self.LTE_MAX_CARRIERS))
-
-        # Initialize the base stations in the test equipment
-        self.anritsu.set_simulation_model(
-            *[md8475a.BtsTechnology.LTE for _ in range(self.num_carriers)],
-            reset=False)
-
-        # If base stations use different bands, make sure that the RF cards are
-        # not being shared by setting the right maximum MIMO modes
-        if self.num_carriers == 2:
-            # RF cards are never shared when doing 2CA so 4X4 can be done in
-            # both base stations.
-            self.bts[0].mimo_support = md8475a.LteMimoMode.MIMO_4X4
-            self.bts[1].mimo_support = md8475a.LteMimoMode.MIMO_4X4
-        elif self.num_carriers == 3:
-            # 4X4 can only be done in the second base station if it is shared
-            # with the primary. If the RF cards cannot be shared, then at most
-            # 2X2 can be done.
-            self.bts[0].mimo_support = md8475a.LteMimoMode.MIMO_4X4
-            if bands[0] == bands[1]:
-                self.bts[1].mimo_support = md8475a.LteMimoMode.MIMO_4X4
-            else:
-                self.bts[1].mimo_support = md8475a.LteMimoMode.MIMO_2X2
-            self.bts[2].mimo_support = md8475a.LteMimoMode.MIMO_2X2
-        elif self.num_carriers > 3:
-            raise NotImplementedError('The controller doesn\'t implement more '
-                                      'than 3 carriers for MD8475B yet.')
-
-        # Enable carrier aggregation if there is more than one carrier
-        if self.num_carriers > 1:
-            self.anritsu.set_carrier_aggregation_enabled()
-
-        # Restart the simulation as changing the simulation model will stop it.
-        self.anritsu.start_simulation()
-
-    def set_input_power(self, bts_index, input_power):
-        """ Sets the input power for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            input_power: the new input power
-        """
-        nrb_ul = int(self.bts[bts_index].nrb_ul)
-        max_nrb_ul = self.bts[bts_index].max_nrb_ul
-        input_level = str(
-            round(input_power - 10 * math.log10(nrb_ul / max_nrb_ul), 1))
-        if nrb_ul < max_nrb_ul:
-            self.log.info('Number of UL RBs ({}) is less than the maximum RB '
-                          'allocation ({}). Increasing UL reference power to '
-                          '{} dbm to compensate'.format(
-                              nrb_ul, max_nrb_ul, input_level))
-        self.bts[bts_index].input_level = input_level
-
-    def set_output_power(self, bts_index, output_power):
-        """ Sets the output power for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            output_power: the new output power
-        """
-        self.bts[bts_index].output_level = output_power
-
-    def set_downlink_channel_number(self, bts_index, channel_number):
-        """ Sets the downlink channel number for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            channel_number: the new channel number
-        """
-        # Temporarily adding this line to workaround a bug in the
-        # Anritsu callbox in which the channel number needs to be set
-        # to a different value before setting it to the final one.
-        self.bts[bts_index].dl_channel = str(int(channel_number + 1))
-        time.sleep(8)
-        self.bts[bts_index].dl_channel = str(int(channel_number))
-
-    def set_dl_256_qam_enabled(self, bts_index, enabled):
-        """ Determines what MCS table should be used for the downlink.
-
-        Args:
-            bts_index: the base station number
-            enabled: whether 256 QAM should be used
-        """
-        if enabled and not self.LTE_SUPPORTS_DL_256QAM:
-            raise RuntimeError('256 QAM is not supported')
-        self.bts[bts_index].lte_dl_modulation_order = \
-            md8475a.ModulationType.Q256 if enabled else md8475a.ModulationType.Q64
-
-    def set_ul_64_qam_enabled(self, bts_index, enabled):
-        """ Determines what MCS table should be used for the uplink.
-
-        Args:
-            bts_index: the base station number
-            enabled: whether 64 QAM should be used
-        """
-        self.bts[bts_index].lte_ul_modulation_order = \
-            md8475a.ModulationType.Q64 if enabled else md8475a.ModulationType.Q16
-
-    def set_mac_padding(self, bts_index, mac_padding):
-        """ Enables or disables MAC padding in the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            mac_padding: the new MAC padding setting
-        """
-        if mac_padding:
-            self.bts[bts_index].tbs_pattern = 'FULLALLOCATION'
-        else:
-            self.bts[bts_index].tbs_pattern = 'OFF'
-
-    def set_lte_rrc_state_change_timer(self, enabled, time=10):
-        """ Configures the LTE RRC state change timer.
-
-        Args:
-            enabled: a boolean indicating if the timer should be on or off.
-            time: time in seconds for the timer to expire
-        """
-        self.anritsu.set_lte_rrc_status_change(enabled)
-        if enabled:
-            self.anritsu.set_lte_rrc_status_change_timer(time)
-
-    def set_cfi(self, bts_index, cfi):
-        """ Sets the Channel Format Indicator for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            cfi: the new CFI setting
-        """
-        self.bts[bts_index].cfi = cfi
-
-    def set_paging_cycle(self, bts_index, cycle_duration):
-        """ Sets the paging cycle duration for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            cycle_duration: the new paging cycle duration in milliseconds
-        """
-        # TODO (b/146068532): implement.
-        self.bts[bts_index].paging_duration = cycle_duration
-
-    def set_phich_resource(self, bts_index, phich):
-        """ Sets the PHICH Resource setting for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            phich: the new PHICH resource setting
-        """
-        self.bts[bts_index].phich_resource = phich
-
-    def set_drx_connected_mode(self, bts_index, active):
-        """ Sets the DRX connected mode
-
-        Args:
-            bts_index: the base station number
-            active: Boolean indicating whether cDRX mode
-                is active
-        """
-        mode = 'MANUAL' if active else 'OFF'
-        self.bts[bts_index].drx_connected_mode = mode
-
-    def set_drx_on_duration_timer(self, bts_index, timer):
-        """ Sets the amount of PDCCH subframes to wait for data after
-            waking up from a DRX cycle
-
-        Args:
-            bts_index: the base station number
-            timer: Number of PDCCH subframes to wait and check for user data
-                after waking from the DRX cycle
-        """
-        self.bts[bts_index].drx_on_duration_timer = timer
-
-    def set_drx_inactivity_timer(self, bts_index, timer):
-        """ Sets the number of PDCCH subframes to wait before entering DRX mode
-
-        Args:
-            bts_index: the base station number
-            timer: The time interval to wait before entering DRX mode
-        """
-        self.bts[bts_index].drx_inactivity_timer = timer
-
-    def set_drx_retransmission_timer(self, bts_index, timer):
-        """ Sets the number of consecutive PDCCH subframes to wait
-        for retransmission
-
-        Args:
-            bts_index: the base station number
-            timer: Number of PDCCH subframes to remain active
-
-        """
-        self.bts[bts_index].drx_retransmission_timer = timer
-
-    def set_drx_long_cycle(self, bts_index, cycle):
-        """ Sets the amount of subframes representing a DRX long cycle.
-
-        Args:
-            bts_index: the base station number
-            cycle: The amount of subframes representing one long DRX cycle.
-                One cycle consists of DRX sleep + DRX on duration
-        """
-        self.bts[bts_index].drx_long_cycle = cycle
-
-    def set_drx_long_cycle_offset(self, bts_index, offset):
-        """ Sets the offset used to determine the subframe number
-        to begin the long drx cycle
-
-        Args:
-            bts_index: the base station number
-            offset: Number in range 0 to (long cycle - 1)
-        """
-        self.bts[bts_index].drx_long_cycle_offset = offset
-
-    def set_band(self, bts_index, band):
-        """ Sets the right duplex mode before switching to a new band.
-
-        Args:
-            bts_index: the base station number
-            band: desired band
-        """
-        bts = self.bts[bts_index]
-
-        # The callbox won't restore the band-dependent default values if the
-        # request is to switch to the same band as the one the base station is
-        # currently using. To ensure that default values are restored, go to a
-        # different band before switching.
-        if int(bts.band) == band:
-            # Using bands 1 and 2 but it could be any others
-            bts.band = '1' if band != 1 else '2'
-            # Switching to config.band will be handled by the parent class
-            # implementation of this method.
-
-        bts.duplex_mode = self.get_duplex_mode(band).value
-        bts.band = band
-        time.sleep(5)  # It takes some time to propagate the new band
-
-    def get_duplex_mode(self, band):
-        """ Determines if the band uses FDD or TDD duplex mode
-
-        Args:
-            band: a band number
-        Returns:
-            an variable of class DuplexMode indicating if band is FDD or TDD
-        """
-
-        if 33 <= int(band) <= 46:
-            return LteSimulation.DuplexMode.TDD
-        else:
-            return LteSimulation.DuplexMode.FDD
-
-    def set_tdd_config(self, bts_index, config):
-        """ Sets the frame structure for TDD bands.
-
-        Args:
-            bts_index: the base station number
-            config: the desired frame structure. An int between 0 and 6.
-        """
-
-        if not 0 <= config <= 6:
-            raise ValueError("The frame structure configuration has to be a "
-                             "number between 0 and 6")
-
-        self.bts[bts_index].uldl_configuration = config
-
-        # Wait for the setting to propagate
-        time.sleep(5)
-
-    def set_ssf_config(self, bts_index, ssf_config):
-        """ Sets the Special Sub-Frame config number for the indicated
-        base station.
-
-        Args:
-            bts_index: the base station number
-            ssf_config: the new ssf config number
-        """
-        # Cast to int in case it was passed as a string
-        ssf_config = int(ssf_config)
-
-        if not 0 <= ssf_config <= 9:
-            raise ValueError('The Special Sub-Frame configuration has to be a '
-                             'number between 0 and 9.')
-
-        self.bts[bts_index].tdd_special_subframe = ssf_config
-
-    def set_bandwidth(self, bts_index, bandwidth):
-        """ Sets the LTE channel bandwidth (MHz)
-
-        Args:
-            bts_index: the base station number
-            bandwidth: desired bandwidth (MHz)
-        """
-        bts = self.bts[bts_index]
-
-        if bandwidth == 20:
-            bts.bandwidth = md8475a.BtsBandwidth.LTE_BANDWIDTH_20MHz
-        elif bandwidth == 15:
-            bts.bandwidth = md8475a.BtsBandwidth.LTE_BANDWIDTH_15MHz
-        elif bandwidth == 10:
-            bts.bandwidth = md8475a.BtsBandwidth.LTE_BANDWIDTH_10MHz
-        elif bandwidth == 5:
-            bts.bandwidth = md8475a.BtsBandwidth.LTE_BANDWIDTH_5MHz
-        elif bandwidth == 3:
-            bts.bandwidth = md8475a.BtsBandwidth.LTE_BANDWIDTH_3MHz
-        elif bandwidth == 1.4:
-            bts.bandwidth = md8475a.BtsBandwidth.LTE_BANDWIDTH_1dot4MHz
-        else:
-            msg = "Bandwidth = {} MHz is not valid for LTE".format(bandwidth)
-            self.log.error(msg)
-            raise ValueError(msg)
-        time.sleep(5)  # It takes some time to propagate the new settings
-
-    def set_mimo_mode(self, bts_index, mimo):
-        """ Sets the number of DL antennas for the desired MIMO mode.
-
-        Args:
-            bts_index: the base station number
-            mimo: object of class MimoMode
-        """
-
-        bts = self.bts[bts_index]
-
-        # If the requested mimo mode is not compatible with the current TM,
-        # warn the user before changing the value.
-
-        if mimo == LteSimulation.MimoMode.MIMO_1x1:
-            if bts.transmode not in [
-                    LteSimulation.TransmissionMode.TM1,
-                    LteSimulation.TransmissionMode.TM7
-            ]:
-                self.log.warning(
-                    "Using only 1 DL antennas is not allowed with "
-                    "the current transmission mode. Changing the "
-                    "number of DL antennas will override this "
-                    "setting.")
-            bts.dl_antenna = 1
-        elif mimo == LteSimulation.MimoMode.MIMO_2x2:
-            if bts.transmode not in [
-                    LteSimulation.TransmissionMode.TM2,
-                    LteSimulation.TransmissionMode.TM3,
-                    LteSimulation.TransmissionMode.TM4,
-                    LteSimulation.TransmissionMode.TM8,
-                    LteSimulation.TransmissionMode.TM9
-            ]:
-                self.log.warning("Using two DL antennas is not allowed with "
-                                 "the current transmission mode. Changing the "
-                                 "number of DL antennas will override this "
-                                 "setting.")
-            bts.dl_antenna = 2
-        elif mimo == LteSimulation.MimoMode.MIMO_4x4 and \
-            self.LTE_SUPPORTS_4X4_MIMO:
-            if bts.transmode not in [
-                    LteSimulation.TransmissionMode.TM2,
-                    LteSimulation.TransmissionMode.TM3,
-                    LteSimulation.TransmissionMode.TM4,
-                    LteSimulation.TransmissionMode.TM9
-            ]:
-                self.log.warning("Using four DL antennas is not allowed with "
-                                 "the current transmission mode. Changing the "
-                                 "number of DL antennas will override this "
-                                 "setting.")
-
-            bts.dl_antenna = 4
-        else:
-            RuntimeError("The requested MIMO mode is not supported.")
-
-    def set_scheduling_mode(self, bts_index, scheduling, mcs_dl, mcs_ul,
-                            nrb_dl, nrb_ul):
-        """ Sets the scheduling mode for LTE
-
-        Args:
-            bts_index: the base station number
-            scheduling: DYNAMIC or STATIC scheduling (Enum list)
-            mcs_dl: Downlink MCS (only for STATIC scheduling)
-            mcs_ul: Uplink MCS (only for STATIC scheduling)
-            nrb_dl: Number of RBs for downlink (only for STATIC scheduling)
-            nrb_ul: Number of RBs for uplink (only for STATIC scheduling)
-        """
-
-        bts = self.bts[bts_index]
-        bts.lte_scheduling_mode = scheduling.value
-
-        if scheduling == LteSimulation.SchedulingMode.STATIC:
-
-            if not all([nrb_dl, nrb_ul, mcs_dl, mcs_ul]):
-                raise ValueError('When the scheduling mode is set to manual, '
-                                 'the RB and MCS parameters are required.')
-
-            bts.packet_rate = md8475a.BtsPacketRate.LTE_MANUAL
-            bts.lte_mcs_dl = mcs_dl
-            bts.lte_mcs_ul = mcs_ul
-            bts.nrb_dl = nrb_dl
-            bts.nrb_ul = nrb_ul
-
-        time.sleep(5)  # It takes some time to propagate the new settings
-
-    def lte_attach_secondary_carriers(self, ue_capability_enquiry):
-        """ Activates the secondary carriers for CA. Requires the DUT to be
-        attached to the primary carrier first.
-
-        Args:
-            ue_capability_enquiry: UE capability enquiry message to be sent to
-        the UE before starting carrier aggregation.
-        """
-
-        # Trigger UE capability enquiry from network to get
-        # UE supported CA band combinations. Here freq_bands is a hex string.
-        self.anritsu.trigger_ue_capability_enquiry(ue_capability_enquiry)
-
-        testcase = self.anritsu.get_AnritsuTestCases()
-        # A bug in the instrument's software (b/139547391) requires the test
-        # procedure to be set to whatever was the previous configuration before
-        # setting it to MULTICELL.
-        testcase.procedure = md8475a.TestProcedure(testcase.procedure)
-        testcase.procedure = md8475a.TestProcedure.PROCEDURE_MULTICELL
-        testcase.power_control = md8475a.TestPowerControl.POWER_CONTROL_DISABLE
-        testcase.measurement_LTE = md8475a.TestMeasurement.MEASUREMENT_DISABLE
-
-        # Enable the secondary carrier base stations for CA
-        for bts_index in range(1, self.num_carriers):
-            self.bts[bts_index].dl_cc_enabled = True
-
-        self.anritsu.start_testcase()
-
-        retry_counter = 0
-        self.log.info("Waiting for the test case to start...")
-        time.sleep(5)
-
-        while self.anritsu.get_testcase_status() == "0":
-            retry_counter += 1
-            if retry_counter == 3:
-                raise RuntimeError(
-                    "The test case failed to start after {} "
-                    "retries. The connection between the phone "
-                    "and the base station might be unstable.".format(
-                        retry_counter))
-            time.sleep(10)
-
-    def set_transmission_mode(self, bts_index, tmode):
-        """ Sets the transmission mode for the LTE basetation
-
-        Args:
-            bts_index: the base station number
-            tmode: Enum list from class 'TransmissionModeLTE'
-        """
-
-        bts = self.bts[bts_index]
-
-        # If the selected transmission mode does not support the number of DL
-        # antennas, throw an exception.
-        if (tmode in [
-                LteSimulation.TransmissionMode.TM1,
-                LteSimulation.TransmissionMode.TM7
-        ] and bts.dl_antenna != '1'):
-            # TM1 and TM7 only support 1 DL antenna
-            raise ValueError("{} allows only one DL antenna. Change the "
-                             "number of DL antennas before setting the "
-                             "transmission mode.".format(tmode.value))
-        elif (tmode == LteSimulation.TransmissionMode.TM8
-              and bts.dl_antenna != '2'):
-            # TM8 requires 2 DL antennas
-            raise ValueError("TM2 requires two DL antennas. Change the "
-                             "number of DL antennas before setting the "
-                             "transmission mode.")
-        elif (tmode in [
-                LteSimulation.TransmissionMode.TM2,
-                LteSimulation.TransmissionMode.TM3,
-                LteSimulation.TransmissionMode.TM4,
-                LteSimulation.TransmissionMode.TM9
-        ] and bts.dl_antenna == '1'):
-            # TM2, TM3, TM4 and TM9 require 2 or 4 DL antennas
-            raise ValueError("{} requires at least two DL atennas. Change the "
-                             "number of DL antennas before setting the "
-                             "transmission mode.".format(tmode.value))
-
-        # The TM mode is allowed for the current number of DL antennas, so it
-        # is safe to change this setting now
-        bts.transmode = tmode.value
-
-        time.sleep(5)  # It takes some time to propagate the new settings
-
-    def wait_until_attached(self, timeout=120):
-        """ Waits until the DUT is attached to the primary carrier.
-
-        Args:
-            timeout: after this amount of time the method will raise a
-                CellularSimulatorError exception. Default is 120 seconds.
-        """
-        try:
-            self.anritsu.wait_for_registration_state(time_to_wait=timeout)
-        except anritsu.AnritsuError:
-            raise cc.CellularSimulatorError('The phone did not attach before '
-                                            'the timeout period ended.')
-
-    def wait_until_communication_state(self, timeout=120):
-        """ Waits until the DUT is in Communication state.
-
-        Args:
-            timeout: after this amount of time the method will raise a
-                CellularSimulatorError exception. Default is 120 seconds.
-        """
-        try:
-            self.anritsu.wait_for_communication_state(time_to_wait=timeout)
-        except anritsu.AnritsuError:
-            raise cc.CellularSimulatorError('The phone was not in '
-                                            'Communication state before '
-                                            'the timeout period ended.')
-
-    def wait_until_idle_state(self, timeout=120):
-        """ Waits until the DUT is in Idle state.
-
-        Args:
-            timeout: after this amount of time the method will raise a
-                CellularSimulatorError exception. Default is 120 seconds.
-        """
-        try:
-            self.anritsu.wait_for_idle_state(time_to_wait=timeout)
-        except anritsu.AnritsuError:
-            raise cc.CellularSimulatorError('The phone was not in Idle state '
-                                            'before the time the timeout '
-                                            'period ended.')
-
-    def detach(self):
-        """ Turns off all the base stations so the DUT loose connection."""
-        if self.anritsu.get_smartstudio_status() == \
-            md8475a.ProcessingStatus.PROCESS_STATUS_NOTRUN.value:
-            self.log.info('Device cannot be detached because simulation is '
-                          'not running.')
-            return
-        self.anritsu.set_simulation_state_to_poweroff()
-
-    def stop(self):
-        """ Stops current simulation. After calling this method, the simulator
-        will need to be set up again. """
-        self.anritsu.stop_simulation()
-
-    def start_data_traffic(self):
-        """ Starts transmitting data from the instrument to the DUT. """
-        try:
-            self.anritsu.start_ip_traffic()
-        except md8475a.AnritsuError as inst:
-            # This typically happens when traffic is already running.
-            # TODO (b/141962691): continue only if traffic is running
-            self.log.warning(str(inst))
-        time.sleep(4)
-
-    def stop_data_traffic(self):
-        """ Stops transmitting data from the instrument to the DUT. """
-        try:
-            self.anritsu.stop_ip_traffic()
-        except md8475a.AnritsuError as inst:
-            # This typically happens when traffic has already been stopped
-            # TODO (b/141962691): continue only if traffic is stopped
-            self.log.warning(str(inst))
-        time.sleep(2)
-
-    def get_measured_pusch_power(self):
-        """ Queries PUSCH power measured at the callbox.
-
-        Returns:
-            The PUSCH power in the primary input port.
-        """
-        # Try three times before raising an exception. This is needed because
-        # the callbox sometimes reports an active chain as 'DEACTIVE'.
-        retries_left = 3
-
-        while retries_left > 0:
-
-            ul_pusch = self.anritsu.get_measured_pusch_power().split(',')[0]
-
-            if ul_pusch != 'DEACTIVE':
-                return float(ul_pusch)
-
-            time.sleep(3)
-            retries_left -= 1
-            self.log.info('Chain shows as inactive. %d retries left.' %
-                          retries_left)
-
-        raise cc.CellularSimulatorError('Could not get measured PUSCH power.')
-
-
-class MD8475BCellularSimulator(MD8475CellularSimulator):
-
-    MD8475_VERSION = 'B'
-
-    # Indicates if it is able to use 256 QAM as the downlink modulation for LTE
-    LTE_SUPPORTS_DL_256QAM = True
-
-    # Indicates if it is able to use 64 QAM as the uplink modulation for LTE
-    LTE_SUPPORTS_UL_64QAM = True
-
-    # Indicates if 4x4 MIMO is supported for LTE
-    LTE_SUPPORTS_4X4_MIMO = True
-
-    # The maximum number of carriers that this simulator can support for LTE
-    LTE_MAX_CARRIERS = 4
-
-    # The maximum power that the equipment is able to transmit
-    MAX_DL_POWER = -30
-
-    # Simulation config files in the callbox computer.
-    # These should be replaced in the future by setting up
-    # the same configuration manually.
-    LTE_BASIC_SIM_FILE = 'SIM_default_LTE.wnssp2'
-    LTE_BASIC_CELL_FILE = 'CELL_LTE_config.wnscp2'
-    LTE_CA_BASIC_SIM_FILE = 'SIM_LTE_CA.wnssp2'
-    LTE_CA_BASIC_CELL_FILE = 'CELL_LTE_CA_config.wnscp2'
-
-    # Filepath to the config files stored in the Anritsu callbox. Needs to be
-    # formatted to replace {} with either A or B depending on the model.
-    CALLBOX_CONFIG_PATH = 'C:\\Users\\MD8475B\\Documents\\DAN_configs\\'
-
-    def setup_lte_scenario(self):
-        """ The B model can support up to five carriers. """
-
-        super().setup_lte_scenario()
-
-        self.bts.extend([
-            self.anritsu.get_BTS(md8475a.BtsNumber.BTS3),
-            self.anritsu.get_BTS(md8475a.BtsNumber.BTS4),
-            self.anritsu.get_BTS(md8475a.BtsNumber.BTS5)
-        ])
diff --git a/src/antlion/controllers/anritsu_lib/md8475a.py b/src/antlion/controllers/anritsu_lib/md8475a.py
deleted file mode 100644
index ac67229..0000000
--- a/src/antlion/controllers/anritsu_lib/md8475a.py
+++ /dev/null
@@ -1,5066 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Controller interface for Anritsu Signalling Tester MD8475A.
-"""
-
-import logging
-import time
-import socket
-from enum import Enum
-from enum import IntEnum
-
-from antlion.controllers.anritsu_lib._anritsu_utils import AnritsuError
-from antlion.controllers.anritsu_lib._anritsu_utils import AnritsuUtils
-from antlion.controllers.anritsu_lib._anritsu_utils import NO_ERROR
-from antlion.controllers.anritsu_lib._anritsu_utils import OPERATION_COMPLETE
-
-from antlion import tracelogger
-
-TERMINATOR = "\0"
-
-# The following wait times (except COMMUNICATION_STATE_WAIT_TIME) are actually
-# the times for socket to time out. Increasing them is to make sure there is
-# enough time for MD8475A operation to be completed in some cases.
-# It won't increase test execution time.
-SMARTSTUDIO_LAUNCH_WAIT_TIME = 300  # was 90
-SMARTSTUDIO_SIMULATION_START_WAIT_TIME = 300  # was 120
-REGISTRATION_STATE_WAIT_TIME = 240
-LOAD_SIMULATION_PARAM_FILE_WAIT_TIME = 30
-COMMUNICATION_STATE_WAIT_TIME = 240
-ANRITSU_SOCKET_BUFFER_SIZE = 8192
-COMMAND_COMPLETE_WAIT_TIME = 180  # was 90
-SETTLING_TIME = 1
-WAIT_TIME_IDENTITY_RESPONSE = 5
-IDLE_STATE_WAIT_TIME = 240
-
-IMSI_READ_USERDATA_WCDMA = "081501"
-IMEI_READ_USERDATA_WCDMA = "081502"
-IMEISV_READ_USERDATA_WCDMA = "081503"
-IMSI_READ_USERDATA_LTE = "075501"
-IMEI_READ_USERDATA_LTE = "075502"
-IMEISV_READ_USERDATA_LTE = "075503"
-IMSI_READ_USERDATA_GSM = "081501"
-IMEI_READ_USERDATA_GSM = "081502"
-IMEISV_READ_USERDATA_GSM = "081503"
-IDENTITY_REQ_DATA_LEN = 24
-SEQ_LOG_MESSAGE_START_INDEX = 60
-
-WCDMA_BANDS = {
-    "I": "1",
-    "II": "2",
-    "III": "3",
-    "IV": "4",
-    "V": "5",
-    "VI": "6",
-    "VII": "7",
-    "VIII": "8",
-    "IX": "9",
-    "X": "10",
-    "XI": "11",
-    "XII": "12",
-    "XIII": "13",
-    "XIV": "14"
-}
-
-
-def create(configs):
-    objs = []
-    for c in configs:
-        ip_address = c["ip_address"]
-        objs.append(MD8475A(ip_address))
-    return objs
-
-
-def destroy(objs):
-    return
-
-
-class ProcessingStatus(Enum):
-    ''' MD8475A processing status for UE,Packet,Voice,Video,SMS,
-        PPP, PWS '''
-    PROCESS_STATUS_NONE = "NONE"
-    PROCESS_STATUS_NOTRUN = "NOTRUN"
-    PROCESS_STATUS_POWEROFF = "POWEROFF"
-    PROCESS_STATUS_REGISTRATION = "REGISTRATION"
-    PROCESS_STATUS_DETACH = "DETACH"
-    PROCESS_STATUS_IDLE = "IDLE"
-    PROCESS_STATUS_ORIGINATION = "ORIGINATION"
-    PROCESS_STATUS_HANDOVER = "HANDOVER"
-    PROCESS_STATUS_UPDATING = "UPDATING"
-    PROCESS_STATUS_TERMINATION = "TERMINATION"
-    PROCESS_STATUS_COMMUNICATION = "COMMUNICATION"
-    PROCESS_STATUS_UERELEASE = "UERELEASE"
-    PROCESS_STATUS_NWRELEASE = "NWRELEASE"
-
-
-class BtsNumber(Enum):
-    '''ID number for MD8475A supported BTS '''
-    BTS1 = "BTS1"
-    BTS2 = "BTS2"
-    BTS3 = "BTS3"
-    BTS4 = "BTS4"
-    BTS5 = "BTS5"
-
-
-class BtsTechnology(Enum):
-    ''' BTS system technology'''
-    LTE = "LTE"
-    WCDMA = "WCDMA"
-    TDSCDMA = "TDSCDMA"
-    GSM = "GSM"
-    CDMA1X = "CDMA1X"
-    EVDO = "EVDO"
-
-
-class BtsBandwidth(Enum):
-    ''' Values for Cell Bandwidth '''
-    LTE_BANDWIDTH_1dot4MHz = "1.4MHz"
-    LTE_BANDWIDTH_3MHz = "3MHz"
-    LTE_BANDWIDTH_5MHz = "5MHz"
-    LTE_BANDWIDTH_10MHz = "10MHz"
-    LTE_BANDWIDTH_15MHz = "15MHz"
-    LTE_BANDWIDTH_20MHz = "20MHz"
-
-    def get_float_value(bts_bandwidth):
-        """ Returns a float representing the bandwidth in MHz.
-
-        Args:
-            bts_bandwidth: a BtsBandwidth enum or a string matching one of the
-            values in the BtsBandwidth enum.
-        """
-
-        if isinstance(bts_bandwidth, BtsBandwidth):
-            bandwidth_str = bts_bandwidth.value
-        elif isinstance(bts_bandwidth, str):
-            bandwidth_str = bts_bandwidth
-        else:
-            raise TypeError('bts_bandwidth should be an instance of string or '
-                            'BtsBandwidth. ')
-
-        if bandwidth_str == BtsBandwidth.LTE_BANDWIDTH_20MHz.value:
-            return 20
-        elif bandwidth_str == BtsBandwidth.LTE_BANDWIDTH_15MHz.value:
-            return 15
-        elif bandwidth_str == BtsBandwidth.LTE_BANDWIDTH_10MHz.value:
-            return 10
-        elif bandwidth_str == BtsBandwidth.LTE_BANDWIDTH_5MHz.value:
-            return 5
-        elif bandwidth_str == BtsBandwidth.LTE_BANDWIDTH_3MHz.value:
-            return 3
-        elif bandwidth_str == BtsBandwidth.LTE_BANDWIDTH_1dot4MHz.value:
-            return 1.4
-        else:
-            raise ValueError(
-                'Could not map {} to a bandwidth value.'.format(bandwidth_str))
-
-
-MAX_NRB_FOR_BANDWIDTH = {
-    BtsBandwidth.LTE_BANDWIDTH_1dot4MHz.value: 6,
-    BtsBandwidth.LTE_BANDWIDTH_3MHz.value: 15,
-    BtsBandwidth.LTE_BANDWIDTH_5MHz.value: 25,
-    BtsBandwidth.LTE_BANDWIDTH_10MHz.value: 50,
-    BtsBandwidth.LTE_BANDWIDTH_15MHz.value: 75,
-    BtsBandwidth.LTE_BANDWIDTH_20MHz.value: 100
-}
-
-
-class LteMimoMode(Enum):
-    """ Values for LTE MIMO modes. """
-    NONE = "MIMONOT"
-    MIMO_2X2 = "MIMO2X2"
-    MIMO_4X4 = "MIMO4X4"
-
-
-class BtsGprsMode(Enum):
-    ''' Values for Gprs Modes '''
-    NO_GPRS = "NO_GPRS"
-    GPRS = "GPRS"
-    EGPRS = "EGPRS"
-
-
-class BtsPacketRate(Enum):
-    ''' Values for Cell Packet rate '''
-    LTE_MANUAL = "MANUAL"
-    LTE_BESTEFFORT = "BESTEFFORT"
-    WCDMA_DL384K_UL64K = "DL384K_UL64K"
-    WCDMA_DLHSAUTO_REL7_UL384K = "DLHSAUTO_REL7_UL384K"
-    WCDMA_DL18_0M_UL384K = "DL18_0M_UL384K"
-    WCDMA_DL21_6M_UL384K = "DL21_6M_UL384K"
-    WCDMA_DLHSAUTO_REL7_ULHSAUTO = "DLHSAUTO_REL7_ULHSAUTO"
-    WCDMA_DL18_0M_UL1_46M = "DL18_0M_UL1_46M"
-    WCDMA_DL18_0M_UL2_0M = "DL18_0M_UL2_0M"
-    WCDMA_DL18_0M_UL5_76M = "DL18_0M_UL5_76M"
-    WCDMA_DL21_6M_UL1_46M = "DL21_6M_UL1_46M"
-    WCDMA_DL21_6M_UL2_0M = "DL21_6M_UL2_0M"
-    WCDMA_DL21_6M_UL5_76M = "DL21_6M_UL5_76M"
-    WCDMA_DLHSAUTO_REL8_UL384K = "DLHSAUTO_REL8_UL384K"
-    WCDMA_DL23_4M_UL384K = "DL23_4M_UL384K"
-    WCDMA_DL28_0M_UL384K = "DL28_0M_UL384K"
-    WCDMA_DL36_0M_UL384K = "DL36_0M_UL384K"
-    WCDMA_DL43_2M_UL384K = "DL43_2M_UL384K"
-    WCDMA_DLHSAUTO_REL8_ULHSAUTO = "DLHSAUTO_REL8_ULHSAUTO"
-    WCDMA_DL23_4M_UL1_46M = "DL23_4M_UL1_46M"
-    WCDMA_DL23_4M_UL2_0M = "DL23_4M_UL2_0M"
-    WCDMA_DL23_4M_UL5_76M = "DL23_4M_UL5_76M"
-    WCDMA_DL28_0M_UL1_46M = "DL28_0M_UL1_46M"
-    WCDMA_DL28_0M_UL2_0M = "DL28_0M_UL2_0M"
-    WCDMA_DL28_0M_UL5_76M = "L28_0M_UL5_76M"
-    WCDMA_DL36_0M_UL1_46M = "DL36_0M_UL1_46M"
-    WCDMA_DL36_0M_UL2_0M = "DL36_0M_UL2_0M"
-    WCDMA_DL36_0M_UL5_76M = "DL36_0M_UL5_76M"
-    WCDMA_DL43_2M_UL1_46M = "DL43_2M_UL1_46M"
-    WCDMA_DL43_2M_UL2_0M = "DL43_2M_UL2_0M"
-    WCDMA_DL43_2M_UL5_76M = "DL43_2M_UL5_76M"
-
-
-class BtsPacketWindowSize(Enum):
-    ''' Values for Cell Packet window size '''
-    WINDOW_SIZE_1 = 1
-    WINDOW_SIZE_8 = 8
-    WINDOW_SIZE_16 = 16
-    WINDOW_SIZE_32 = 32
-    WINDOW_SIZE_64 = 64
-    WINDOW_SIZE_128 = 128
-    WINDOW_SIZE_256 = 256
-    WINDOW_SIZE_512 = 512
-    WINDOW_SIZE_768 = 768
-    WINDOW_SIZE_1024 = 1024
-    WINDOW_SIZE_1536 = 1536
-    WINDOW_SIZE_2047 = 2047
-
-
-class BtsServiceState(Enum):
-    ''' Values for BTS service state '''
-    SERVICE_STATE_IN = "IN"
-    SERVICE_STATE_OUT = "OUT"
-
-
-class BtsCellBarred(Enum):
-    ''' Values for Cell barred parameter '''
-    NOTBARRED = "NOTBARRED"
-    BARRED = "BARRED"
-
-
-class BtsAccessClassBarred(Enum):
-    ''' Values for Access class barred parameter '''
-    NOTBARRED = "NOTBARRED"
-    EMERGENCY = "EMERGENCY"
-    BARRED = "BARRED"
-    USERSPECIFIC = "USERSPECIFIC"
-
-
-class BtsLteEmergencyAccessClassBarred(Enum):
-    ''' Values for Lte emergency access class barred parameter '''
-    NOTBARRED = "NOTBARRED"
-    BARRED = "BARRED"
-
-
-class BtsNwNameEnable(Enum):
-    ''' Values for BT network name enable parameter '''
-    NAME_ENABLE = "ON"
-    NAME_DISABLE = "OFF"
-
-
-class IPAddressType(Enum):
-    ''' Values for IP address type '''
-    IPV4 = "IPV4"
-    IPV6 = "IPV6"
-    IPV4V6 = "IPV4V6"
-
-
-class TriggerMessageIDs(Enum):
-    ''' ID for Trigger messages  '''
-    RRC_CONNECTION_REQ = 111101
-    RRC_CONN_REESTABLISH_REQ = 111100
-    ATTACH_REQ = 141141
-    DETACH_REQ = 141145
-    MM_LOC_UPDATE_REQ = 221108
-    GMM_ATTACH_REQ = 241101
-    GMM_RA_UPDATE_REQ = 241108
-    IDENTITY_REQUEST_LTE = 141155
-    IDENTITY_REQUEST_WCDMA = 241115
-    IDENTITY_REQUEST_GSM = 641115
-    UE_CAPABILITY_ENQUIRY = 111167
-
-
-class TriggerMessageReply(Enum):
-    ''' Values for Trigger message reply parameter '''
-    ACCEPT = "ACCEPT"
-    REJECT = "REJECT"
-    IGNORE = "IGNORE"
-    NONE = "NONE"
-    ILLEGAL = "ILLEGAL"
-
-
-class TestProcedure(Enum):
-    ''' Values for different Test procedures in MD8475A '''
-    PROCEDURE_BL = "BL"
-    PROCEDURE_SELECTION = "SELECTION"
-    PROCEDURE_RESELECTION = "RESELECTION"
-    PROCEDURE_REDIRECTION = "REDIRECTION"
-    PROCEDURE_HO = "HO"
-    PROCEDURE_HHO = "HHO"
-    PROCEDURE_SHO = "SHO"
-    PROCEDURE_MEASUREMENT = "MEASUREMENT"
-    PROCEDURE_CELLCHANGE = "CELLCHANGE"
-    PROCEDURE_MULTICELL = "MULTICELL"
-
-
-class TestPowerControl(Enum):
-    ''' Values for power control in test procedure '''
-    POWER_CONTROL_ENABLE = "ENABLE"
-    POWER_CONTROL_DISABLE = "DISABLE"
-
-
-class TestMeasurement(Enum):
-    ''' Values for mesaurement in test procedure '''
-    MEASUREMENT_ENABLE = "ENABLE"
-    MEASUREMENT_DISABLE = "DISABLE"
-
-
-'''MD8475A processing states'''
-_PROCESS_STATES = {
-    "NONE": ProcessingStatus.PROCESS_STATUS_NONE,
-    "NOTRUN": ProcessingStatus.PROCESS_STATUS_NOTRUN,
-    "POWEROFF": ProcessingStatus.PROCESS_STATUS_POWEROFF,
-    "REGISTRATION": ProcessingStatus.PROCESS_STATUS_REGISTRATION,
-    "DETACH": ProcessingStatus.PROCESS_STATUS_DETACH,
-    "IDLE": ProcessingStatus.PROCESS_STATUS_IDLE,
-    "ORIGINATION": ProcessingStatus.PROCESS_STATUS_ORIGINATION,
-    "HANDOVER": ProcessingStatus.PROCESS_STATUS_HANDOVER,
-    "UPDATING": ProcessingStatus.PROCESS_STATUS_UPDATING,
-    "TERMINATION": ProcessingStatus.PROCESS_STATUS_TERMINATION,
-    "COMMUNICATION": ProcessingStatus.PROCESS_STATUS_COMMUNICATION,
-    "UERELEASE": ProcessingStatus.PROCESS_STATUS_UERELEASE,
-    "NWRELEASE": ProcessingStatus.PROCESS_STATUS_NWRELEASE,
-}
-
-
-class ImsCscfStatus(Enum):
-    """ MD8475A ims cscf status for UE
-    """
-    OFF = "OFF"
-    SIPIDLE = "SIPIDLE"
-    CONNECTED = "CONNECTED"
-    CALLING = "CALLING"
-    RINGING = "RINGING"
-    UNKNOWN = "UNKNOWN"
-
-
-class ImsCscfCall(Enum):
-    """ MD8475A ims cscf call action
-    """
-    MAKE = "MAKE"
-    END = "END"
-    MAKEVIDEO = "MAKEVIDEO"
-    MAKE2ND = "MAKE2ND"
-    END2ND = "END2ND"
-    ANSWER = "ANSWER"
-    HOLD = "HOLD"
-    RESUME = "RESUME"
-
-
-class VirtualPhoneStatus(IntEnum):
-    ''' MD8475A virtual phone status for UE voice and UE video
-        PPP, PWS '''
-    STATUS_IDLE = 0
-    STATUS_VOICECALL_ORIGINATION = 1
-    STATUS_VOICECALL_INCOMING = 2
-    STATUS_VOICECALL_INPROGRESS = 3
-    STATUS_VOICECALL_DISCONNECTING = 4
-    STATUS_VOICECALL_DISCONNECTED = 5
-    STATUS_VIDEOCALL_ORIGINATION = 6
-    STATUS_VIDEOCALL_INCOMING = 7
-    STATUS_VIDEOCALL_INPROGRESS = 8
-    STATUS_VIDEOCALL_DISCONNECTING = 9
-    STATUS_VIDEOCALL_DISCONNECTED = 10
-
-
-'''Virtual Phone Status '''
-_VP_STATUS = {
-    "0": VirtualPhoneStatus.STATUS_IDLE,
-    "1": VirtualPhoneStatus.STATUS_VOICECALL_ORIGINATION,
-    "2": VirtualPhoneStatus.STATUS_VOICECALL_INCOMING,
-    "3": VirtualPhoneStatus.STATUS_VOICECALL_INPROGRESS,
-    "4": VirtualPhoneStatus.STATUS_VOICECALL_DISCONNECTING,
-    "5": VirtualPhoneStatus.STATUS_VOICECALL_DISCONNECTED,
-    "6": VirtualPhoneStatus.STATUS_VIDEOCALL_ORIGINATION,
-    "7": VirtualPhoneStatus.STATUS_VIDEOCALL_INCOMING,
-    "8": VirtualPhoneStatus.STATUS_VIDEOCALL_INPROGRESS,
-    "9": VirtualPhoneStatus.STATUS_VIDEOCALL_DISCONNECTING,
-    "10": VirtualPhoneStatus.STATUS_VIDEOCALL_DISCONNECTED,
-}
-
-
-class VirtualPhoneAutoAnswer(Enum):
-    ''' Virtual phone auto answer enable values'''
-    ON = "ON"
-    OFF = "OFF"
-
-
-class CsfbType(Enum):
-    ''' CSFB Type values'''
-    CSFB_TYPE_REDIRECTION = "REDIRECTION"
-    CSFB_TYPE_HANDOVER = "HO"
-
-
-class ReturnToEUTRAN(Enum):
-    '''Return to EUTRAN setting values '''
-    RETEUTRAN_ENABLE = "ENABLE"
-    RETEUTRAN_DISABLE = "DISABLE"
-
-
-class CTCHSetup(Enum):
-    '''CTCH setting values '''
-    CTCH_ENABLE = "ENABLE"
-    CTCH_DISABLE = "DISABLE"
-
-
-class UEIdentityType(Enum):
-    '''UE Identity type values '''
-    IMSI = "IMSI"
-    IMEI = "IMEI"
-    IMEISV = "IMEISV"
-
-
-class CBCHSetup(Enum):
-    '''CBCH setting values '''
-    CBCH_ENABLE = "ENABLE"
-    CBCH_DISABLE = "DISABLE"
-
-
-class Switch(Enum):
-    ''' Values for ENABLE or DISABLE '''
-    ENABLE = "ENABLE"
-    DISABLE = "DISABLE"
-
-
-class ModulationType(Enum):
-    """Supported Modulation Types."""
-    Q16 = '16QAM'
-    Q64 = '64QAM'
-    Q256 = '256QAM'
-
-
-class MD8475A(object):
-    """Class to communicate with Anritsu MD8475A Signalling Tester.
-       This uses GPIB command to interface with Anritsu MD8475A """
-    def __init__(self, ip_address, wlan=False, md8475_version="A"):
-        self._error_reporting = True
-        self._ipaddr = ip_address
-        self.log = tracelogger.TraceLogger(logging.getLogger())
-        self._wlan = wlan
-        port_number = 28002
-        self._md8475_version = md8475_version
-        if md8475_version == "B":
-            global TERMINATOR
-            TERMINATOR = "\n"
-            port_number = 5025
-
-        # Open socket connection to Signaling Tester
-        self.log.info("Opening Socket Connection with "
-                      "Signaling Tester ({}) ".format(self._ipaddr))
-        try:
-            self._sock = socket.create_connection((self._ipaddr, port_number),
-                                                  timeout=120)
-            self.send_query("*IDN?", 60)
-            self.log.info("Communication with Signaling Tester OK.")
-            self.log.info("Opened Socket connection to ({})"
-                          "with handle ({})".format(self._ipaddr, self._sock))
-            # launching Smart Studio Application needed for the simulation
-            ret = self.launch_smartstudio()
-        except socket.timeout:
-            raise AnritsuError("Timeout happened while conencting to"
-                               " Anritsu MD8475A")
-        except socket.error:
-            raise AnritsuError("Socket creation error")
-
-    def get_BTS(self, btsnumber):
-        """ Returns the BTS object based on the BTS number provided
-
-        Args:
-            btsnumber: BTS number (BTS1, BTS2)
-
-        Returns:
-            BTS object
-        """
-        return _BaseTransceiverStation(self, btsnumber)
-
-    def get_AnritsuTestCases(self):
-        """ Returns the Anritsu Test Case Module Object
-
-        Args:
-            None
-
-        Returns:
-            Anritsu Test Case Module Object
-        """
-        return _AnritsuTestCases(self)
-
-    def get_VirtualPhone(self):
-        """ Returns the Anritsu Virtual Phone Module Object
-
-        Args:
-            None
-
-        Returns:
-            Anritsu Virtual Phone Module Object
-        """
-        return _VirtualPhone(self)
-
-    def get_PDN(self, pdn_number):
-        """ Returns the PDN Module Object
-
-        Args:
-            None
-
-        Returns:
-            Anritsu PDN Module Object
-        """
-        return _PacketDataNetwork(self, pdn_number)
-
-    def get_TriggerMessage(self):
-        """ Returns the Anritsu Trigger Message Module Object
-
-        Args:
-            None
-
-        Returns:
-            Anritsu Trigger Message Module Object
-        """
-        return _TriggerMessage(self)
-
-    def get_IMS(self, vnid):
-        """ Returns the IMS Module Object with VNID
-
-        Args:
-            vnid: Virtual Network ID
-
-        Returns:
-            Anritsu IMS VNID Module Object
-        """
-        return _IMS_Services(self, vnid)
-
-    def get_ims_cscf_status(self, virtual_network_id):
-        """ Get the IMS CSCF Status of virtual network
-
-        Args:
-            virtual_network_id: virtual network id
-
-        Returns:
-            IMS CSCF status
-        """
-        cmd = "IMSCSCFSTAT? {}".format(virtual_network_id)
-        return self.send_query(cmd)
-
-    def ims_cscf_call_action(self, virtual_network_id, action):
-        """ IMS CSCF Call action
-
-        Args:
-            virtual_network_id: virtual network id
-            action: action to make
-
-        Returns:
-            None
-        """
-        cmd = "IMSCSCFCALL {},{}".format(virtual_network_id, action)
-        self.send_command(cmd)
-
-    def send_query(self, query, sock_timeout=120):
-        """ Sends a Query message to Anritsu and return response
-
-        Args:
-            query - Query string
-
-        Returns:
-            query response
-        """
-        self.log.info("--> {}".format(query))
-        querytoSend = (query + TERMINATOR).encode('utf-8')
-        self._sock.settimeout(sock_timeout)
-        try:
-            self._sock.send(querytoSend)
-            result = self._sock.recv(ANRITSU_SOCKET_BUFFER_SIZE).rstrip(
-                TERMINATOR.encode('utf-8'))
-            response = result.decode('utf-8')
-            self.log.info('<-- {}'.format(response))
-            return response
-        except socket.timeout:
-            raise AnritsuError("Timeout: Response from Anritsu")
-        except socket.error:
-            raise AnritsuError("Socket Error")
-
-    def send_command(self, command, sock_timeout=120):
-        """ Sends a Command message to Anritsu
-
-        Args:
-            command - command string
-
-        Returns:
-            None
-        """
-        self.log.info("--> {}".format(command))
-        if self._error_reporting:
-            cmdToSend = (command + ";ERROR?" + TERMINATOR).encode('utf-8')
-            self._sock.settimeout(sock_timeout)
-            try:
-                self._sock.send(cmdToSend)
-                err = self._sock.recv(ANRITSU_SOCKET_BUFFER_SIZE).rstrip(
-                    TERMINATOR.encode('utf-8'))
-                error = int(err.decode('utf-8'))
-                if error != NO_ERROR:
-                    raise AnritsuError(error, command)
-            except socket.timeout:
-                raise AnritsuError("Timeout for Command Response from Anritsu")
-            except socket.error:
-                raise AnritsuError("Socket Error for Anritsu command")
-            except Exception as e:
-                raise AnritsuError(e, command)
-        else:
-            cmdToSend = (command + TERMINATOR).encode('utf-8')
-            try:
-                self._sock.send(cmdToSend)
-            except socket.error:
-                raise AnritsuError("Socket Error", command)
-            return
-
-    def launch_smartstudio(self):
-        """ launch the Smart studio application
-            This should be done before stating simulation
-
-        Args:
-            None
-
-        Returns:
-            None
-        """
-        # check the Smart Studio status . If Smart Studio doesn't exist ,
-        # start it.if it is running, stop it. Smart Studio should be in
-        # NOTRUN (Simulation Stopped) state to start new simulation
-        stat = self.send_query("STAT?", 30)
-        if stat == "NOTEXIST":
-            self.log.info("Launching Smart Studio Application,"
-                          "it takes about a minute.")
-            time_to_wait = SMARTSTUDIO_LAUNCH_WAIT_TIME
-            sleep_interval = 15
-            waiting_time = 0
-
-            err = self.send_command("RUN", SMARTSTUDIO_LAUNCH_WAIT_TIME)
-            stat = self.send_query("STAT?")
-            while stat != "NOTRUN":
-                time.sleep(sleep_interval)
-                waiting_time = waiting_time + sleep_interval
-                if waiting_time <= time_to_wait:
-                    stat = self.send_query("STAT?")
-                else:
-                    raise AnritsuError("Timeout: Smart Studio launch")
-        elif stat == "RUNNING":
-            # Stop simulation if necessary
-            self.send_command("STOP", 60)
-            stat = self.send_query("STAT?")
-
-        # The state of the Smart Studio should be NOTRUN at this point
-        # after the one of the steps from above
-        if stat != "NOTRUN":
-            self.log.info(
-                "Can not launch Smart Studio, "
-                "please shut down all the Smart Studio SW components")
-            raise AnritsuError("Could not run SmartStudio")
-
-    def close_smartstudio(self):
-        """ Closes the Smart studio application
-
-        Args:
-            None
-
-        Returns:
-            None
-        """
-        self.stop_simulation()
-        self.send_command("EXIT", 60)
-
-    def get_smartstudio_status(self):
-        """ Gets the Smart studio status
-
-        Args:
-            None
-
-        Returns:
-            Smart studio status
-        """
-        return self.send_query("STAT?")
-
-    def start_simulation(self):
-        """ Starting the simulation of the network model.
-            simulation model or simulation parameter file
-            should be set before starting the simulation
-
-        Args:
-          None
-
-        Returns:
-            None
-        """
-        time_to_wait = SMARTSTUDIO_SIMULATION_START_WAIT_TIME
-        sleep_interval = 2
-        waiting_time = 0
-
-        self.send_command("START", SMARTSTUDIO_SIMULATION_START_WAIT_TIME)
-
-        self.log.info("Waiting for CALLSTAT=POWEROFF")
-        callstat = self.send_query("CALLSTAT? BTS1").split(",")
-        while callstat[0] != "POWEROFF":
-            time.sleep(sleep_interval)
-            waiting_time += sleep_interval
-            if waiting_time <= time_to_wait:
-                callstat = self.send_query("CALLSTAT? BTS1").split(",")
-            else:
-                raise AnritsuError("Timeout: Starting simulation")
-
-    def stop_simulation(self):
-        """ Stop simulation operation
-
-        Args:
-          None
-
-        Returns:
-            None
-        """
-        # Stop virtual network (IMS) #1 if still running
-        # this is needed before Sync command is supported in 6.40a
-        if self.send_query("IMSVNSTAT? 1") == "RUNNING":
-            self.send_command("IMSSTOPVN 1")
-        if self.send_query("IMSVNSTAT? 2") == "RUNNING":
-            self.send_command("IMSSTOPVN 2")
-        stat = self.send_query("STAT?")
-        # Stop simulation if its is RUNNING
-        if stat == "RUNNING":
-            self.send_command("STOP", 60)
-            stat = self.send_query("STAT?")
-            if stat != "NOTRUN":
-                self.log.info("Failed to stop simulation")
-                raise AnritsuError("Failed to stop simulation")
-
-    def reset(self):
-        """ reset simulation parameters
-
-        Args:
-          None
-
-        Returns:
-            None
-        """
-        self.send_command("*RST", COMMAND_COMPLETE_WAIT_TIME)
-
-    def load_simulation_paramfile(self, filepath):
-        """ loads simulation model parameter file
-        Args:
-          filepath : simulation model parameter file path
-
-        Returns:
-            None
-        """
-        self.stop_simulation()
-        cmd = "LOADSIMPARAM \"" + filepath + '\";ERROR?'
-        self.send_query(cmd, LOAD_SIMULATION_PARAM_FILE_WAIT_TIME)
-
-    def load_cell_paramfile(self, filepath):
-        """ loads cell model parameter file
-
-        Args:
-          filepath : cell model parameter file path
-
-        Returns:
-            None
-        """
-        self.stop_simulation()
-        cmd = "LOADCELLPARAM \"" + filepath + '\";ERROR?'
-        status = int(self.send_query(cmd))
-        if status != NO_ERROR:
-            raise AnritsuError(status, cmd)
-
-    def _set_simulation_model(self, sim_model, reset=True):
-        """ Set simulation model and valid the configuration
-
-        Args:
-            sim_model: simulation model
-            reset: if True, reset the simulation after setting the new
-            simulation model
-        Returns:
-            True/False
-        """
-        error = int(
-            self.send_query("SIMMODEL %s;ERROR?" % sim_model,
-                            COMMAND_COMPLETE_WAIT_TIME))
-        if error:  # Try again if first set SIMMODEL fails
-            time.sleep(3)
-            if "WLAN" in sim_model:
-                new_sim_model = sim_model[:-5]
-                error = int(
-                    self.send_query("SIMMODEL %s;ERROR?" % new_sim_model,
-                                    COMMAND_COMPLETE_WAIT_TIME))
-                time.sleep(3)
-            error = int(
-                self.send_query("SIMMODEL %s;ERROR?" % sim_model,
-                                COMMAND_COMPLETE_WAIT_TIME))
-            if error:
-                return False
-        if reset:
-            # Reset might be necessary because SIMMODEL will load
-            # some of the contents from previous parameter files.
-            self.reset()
-        return True
-
-    def set_simulation_model(self, *bts_rats, reset=True):
-        """ Stops the simulation and then sets the simulation model.
-
-        Args:
-            *bts_rats: base station rats for BTS 1 to 5.
-            reset: if True, reset the simulation after setting the new
-            simulation model
-        Returns:
-            True or False
-        """
-        self.stop_simulation()
-        if len(bts_rats) not in range(1, 6):
-            raise ValueError(
-                "set_simulation_model requires 1 to 5 BTS values.")
-        simmodel = ",".join(bts_rat.value for bts_rat in bts_rats)
-        if self._wlan:
-            simmodel = simmodel + "," + "WLAN"
-        return self._set_simulation_model(simmodel, reset)
-
-    def get_simulation_model(self):
-        """ Gets the simulation model
-
-        Args:
-            None
-
-        Returns:
-            Current simulation model
-        """
-        cmd = "SIMMODEL?"
-        return self.send_query(cmd)
-
-    def get_lte_rrc_status_change(self):
-        """ Gets the LTE RRC status change function state
-
-        Returns:
-            Boolean: True is Enabled / False is Disabled
-        """
-        cmd = "L_RRCSTAT?"
-        return self.send_query(cmd) == "ENABLE"
-
-    def set_lte_rrc_status_change(self, status_change):
-        """ Enables or Disables the LTE RRC status change function
-
-        Returns:
-            None
-        """
-        cmd = "L_RRCSTAT "
-        if status_change:
-            cmd += "ENABLE"
-        else:
-            cmd += "DISABLE"
-        self.send_command(cmd)
-
-    def get_lte_rrc_status_change_timer(self):
-        """ Gets the LTE RRC Status Change Timer
-
-        Returns:
-            returns a status change timer integer value
-        """
-        cmd = "L_STATTMR?"
-        return self.send_query(cmd)
-
-    def set_lte_rrc_status_change_timer(self, time):
-        """ Sets the LTE RRC Status Change Timer parameter
-
-        Returns:
-            None
-        """
-        cmd = "L_STATTMR %s" % time
-        self.send_command(cmd)
-
-    def set_umts_rrc_status_change(self, status_change):
-        """ Enables or Disables the UMTS RRC status change function
-
-        Returns:
-            None
-        """
-        cmd = "W_RRCSTAT "
-        if status_change:
-            cmd += "ENABLE"
-        else:
-            cmd += "DISABLE"
-        self.send_command(cmd)
-
-    def get_umts_rrc_status_change(self):
-        """ Gets the UMTS RRC Status Change
-
-        Returns:
-            Boolean: True is Enabled / False is Disabled
-        """
-        cmd = "W_RRCSTAT?"
-        return self.send_query(cmd)
-
-    def set_umts_dch_stat_timer(self, timer_seconds):
-        """ Sets the UMTS RRC DCH timer
-
-        Returns:
-            None
-        """
-        cmd = "W_STATTMRDCH %s" % timer_seconds
-        self.send_command(cmd)
-
-    def set_simulation_state_to_poweroff(self):
-        """ Sets the simulation state to POWER OFF
-
-        Args:
-          None
-
-        Returns:
-            None
-        """
-        self.send_command("RESETSIMULATION POWEROFF")
-        time_to_wait = 30
-        sleep_interval = 2
-        waiting_time = 0
-
-        self.log.info("Waiting for CALLSTAT=POWEROFF")
-        callstat = self.send_query("CALLSTAT?").split(",")
-        while callstat[0] != "POWEROFF":
-            time.sleep(sleep_interval)
-            waiting_time = waiting_time + sleep_interval
-            if waiting_time <= time_to_wait:
-                callstat = self.send_query("CALLSTAT?").split(",")
-            else:
-                break
-
-    def set_simulation_state_to_idle(self, btsnumber):
-        """ Sets the simulation state to IDLE
-
-        Args:
-          None
-
-        Returns:
-            None
-        """
-        if not isinstance(btsnumber, BtsNumber):
-            raise ValueError(' The parameter should be of type "BtsNumber" ')
-        cmd = "RESETSIMULATION IDLE," + btsnumber.value
-        self.send_command(cmd)
-        time_to_wait = 30
-        sleep_interval = 2
-        waiting_time = 0
-
-        self.log.info("Waiting for CALLSTAT=IDLE")
-        callstat = self.send_query("CALLSTAT?").split(",")
-        while callstat[0] != "IDLE":
-            time.sleep(sleep_interval)
-            waiting_time = waiting_time + sleep_interval
-            if waiting_time <= time_to_wait:
-                callstat = self.send_query("CALLSTAT?").split(",")
-            else:
-                break
-
-    def set_trigger_message_mode(self, msg_id):
-        """ Sets the Message Mode of the trigger
-
-        Args:
-            msg_id: The hex value of the identity of an RRC/NAS message.
-
-        Returns:
-            None
-        """
-
-        if isinstance(msg_id, TriggerMessageIDs):
-            msg_id = msg_id.value
-
-        cmd = "TMMESSAGEMODE {},USERDATA".format(msg_id)
-        self.send_command(cmd)
-
-    def set_data_of_trigger_message(self, msg_id, user_data):
-        """ Sets the User Data of the trigger message
-
-        Args:
-            msg_id: The hex value of the identity of an RRC/NAS message.
-            user_data: Hex data
-
-        Returns:
-            None
-        """
-
-        if isinstance(msg_id, TriggerMessageIDs):
-            msg_id = msg_id.value
-
-        data_len = len(user_data) * 4
-
-        cmd = "TMUSERDATA {}, {}, {}".format(msg_id, user_data, data_len)
-        self.send_command(cmd)
-
-    def send_trigger_message(self, msg_id):
-        """ Sends the User Data of the trigger information
-
-        Args:
-            msg_id: The hex value of the identity of an RRC/NAS message.
-
-        Returns:
-            None
-        """
-
-        if isinstance(msg_id, TriggerMessageIDs):
-            msg_id = msg_id.value
-
-        cmd = "TMSENDUSERMSG {}".format(msg_id)
-        self.send_command(cmd)
-
-    def wait_for_registration_state(self,
-                                    bts=1,
-                                    time_to_wait=REGISTRATION_STATE_WAIT_TIME):
-        """ Waits for UE registration state on Anritsu
-
-        Args:
-          bts: index of MD8475A BTS, eg 1, 2
-          time_to_wait: time to wait for the phone to get to registration state
-
-        Returns:
-            None
-        """
-        self.log.info("wait for IDLE/COMMUNICATION state on anritsu.")
-
-        sleep_interval = 1
-        sim_model = (self.get_simulation_model()).split(",")
-        # wait 1 more round for GSM because of PS attach
-        registration_check_iterations = 2 if sim_model[bts - 1] == "GSM" else 1
-        for _ in range(registration_check_iterations):
-            waiting_time = 0
-            while waiting_time <= time_to_wait:
-                callstat = self.send_query(
-                    "CALLSTAT? BTS{}".format(bts)).split(",")
-                if callstat[0] == "IDLE" or callstat[1] == "COMMUNICATION":
-                    break
-                time.sleep(sleep_interval)
-                waiting_time += sleep_interval
-            else:
-                raise AnritsuError(
-                    "UE failed to register in {} seconds".format(time_to_wait))
-            time.sleep(sleep_interval)
-
-    def wait_for_communication_state(
-            self, time_to_wait=COMMUNICATION_STATE_WAIT_TIME):
-        """ Waits for UE communication state on Anritsu
-
-        Args:
-          time_to_wait: time to wait for the phone to get to communication state
-
-        Returns:
-            None
-        """
-        self.log.info("wait for COMMUNICATION state on anritsu")
-        sleep_interval = 1
-        waiting_time = 0
-
-        self.log.info("Waiting for CALLSTAT=COMMUNICATION")
-        callstat = self.send_query("CALLSTAT? BTS1").split(",")
-        while callstat[1] != "COMMUNICATION":
-            time.sleep(sleep_interval)
-            waiting_time += sleep_interval
-            if waiting_time <= time_to_wait:
-                callstat = self.send_query("CALLSTAT? BTS1").split(",")
-            else:
-                raise AnritsuError("UE failed to register on network")
-
-    def wait_for_idle_state(self, time_to_wait=IDLE_STATE_WAIT_TIME):
-        """ Waits for UE idle state on Anritsu
-
-        Args:
-          time_to_wait: time to wait for the phone to get to idle state
-
-        Returns:
-            None
-        """
-        self.log.info("wait for IDLE state on anritsu.")
-
-        sleep_interval = 1
-        waiting_time = 0
-
-        callstat = self.send_query("CALLSTAT? BTS1").split(",")
-        while callstat[0] != "IDLE":
-            time.sleep(sleep_interval)
-            waiting_time += sleep_interval
-            if waiting_time <= time_to_wait:
-                callstat = self.send_query("CALLSTAT? BTS1").split(",")
-            else:
-                raise AnritsuError("UE failed to go on idle state")
-
-    def get_camping_cell(self):
-        """ Gets the current camping cell information
-
-        Args:
-          None
-
-        Returns:
-            returns a tuple (BTS number, RAT Technology) '
-        """
-        bts_number, rat_info = self.send_query("CAMPINGCELL?").split(",")
-        return bts_number, rat_info
-
-    def get_supported_bands(self, rat):
-        """ Gets the supported bands from UE capability information
-
-        Args:
-          rat: LTE or WCDMA
-
-        Returns:
-            returns a list of bnads
-        """
-        cmd = "UEINFO? "
-        if rat == "LTE":
-            cmd += "L"
-        elif rat == "WCDMA":
-            cmd += "W"
-        else:
-            raise ValueError('The rat argument needs to be "LTE" or "WCDMA"')
-        cmd += "_SupportedBand"
-        result = self.send_query(cmd).split(",")
-        if result == "NONE":
-            return None
-        if rat == "WCDMA":
-            bands = []
-            for band in result:
-                bands.append(WCDMA_BANDS[band])
-            return bands
-        else:
-            return result
-
-    def start_testcase(self):
-        """ Starts a test case on Anritsu
-
-        Args:
-          None
-
-        Returns:
-            None
-        """
-        self.send_command("STARTTEST")
-
-    def get_testcase_status(self):
-        """ Gets the current test case status on Anritsu
-
-        Args:
-          None
-
-        Returns:
-            current test case status
-        """
-        return self.send_query("TESTSTAT?")
-
-    def start_ip_traffic(self, pdn='1'):
-        """ Starts IP data traffic with the selected PDN.
-
-        Args:
-            pdn: the pdn to be used for data traffic. Defaults to '1'.
-        """
-        self.send_command('OPERATEIPTRAFFIC START,' + pdn)
-
-    def stop_ip_traffic(self, pdn='1'):
-        """ Stops IP data traffic with the selected PDN.
-
-         Args:
-            pdn: pdn for which data traffic has to be stopped. Defaults to '1'.
-        """
-        self.send_command('OPERATEIPTRAFFIC STOP,' + pdn)
-
-    def set_carrier_aggregation_enabled(self, enabled=True):
-        """ Enables or disables de carrier aggregation option.
-
-        Args:
-            enabled: enables CA if True and disables CA if False.
-        """
-        cmd = 'CA ' + 'ENABLE' if enabled else 'DISABLE'
-        self.send_command(cmd)
-
-    # Common Default Gateway:
-    @property
-    def gateway_ipv4addr(self):
-        """ Gets the IPv4 address of the default gateway
-
-        Args:
-          None
-
-        Returns:
-            current UE status
-        """
-        return self.send_query("DGIPV4?")
-
-    @gateway_ipv4addr.setter
-    def gateway_ipv4addr(self, ipv4_addr):
-        """ sets the IPv4 address of the default gateway
-        Args:
-            ipv4_addr: IPv4 address of the default gateway
-
-        Returns:
-            None
-        """
-        cmd = "DGIPV4 " + ipv4_addr
-        self.send_command(cmd)
-
-    @property
-    def gateway_ipv6addr(self):
-        """ Gets the IPv6 address of the default gateway
-
-        Args:
-          None
-
-        Returns:
-            current UE status
-        """
-        return self.send_query("DGIPV6?")
-
-    @gateway_ipv6addr.setter
-    def gateway_ipv6addr(self, ipv6_addr):
-        """ sets the IPv6 address of the default gateway
-        Args:
-            ipv6_addr: IPv6 address of the default gateway
-
-        Returns:
-            None
-        """
-        cmd = "DGIPV6 " + ipv6_addr
-        self.send_command(cmd)
-
-    @property
-    def usim_key(self):
-        """ Gets the USIM Security Key
-
-        Args:
-          None
-
-        Returns:
-            USIM Security Key
-        """
-        return self.send_query("USIMK?")
-
-    @usim_key.setter
-    def usim_key(self, usimk):
-        """ sets the USIM Security Key
-        Args:
-            usimk: USIM Security Key, eg "000102030405060708090A0B0C0D0E0F"
-
-        Returns:
-            None
-        """
-        cmd = "USIMK " + usimk
-        self.send_command(cmd)
-
-    def get_ue_status(self):
-        """ Gets the current UE status on Anritsu
-
-        Args:
-          None
-
-        Returns:
-            current UE status
-        """
-        UE_STATUS_INDEX = 0
-        ue_status = self.send_query("CALLSTAT?").split(",")[UE_STATUS_INDEX]
-        return _PROCESS_STATES[ue_status]
-
-    def get_packet_status(self):
-        """ Gets the current Packet status on Anritsu
-
-        Args:
-          None
-
-        Returns:
-            current Packet status
-        """
-        PACKET_STATUS_INDEX = 1
-        packet_status = self.send_query("CALLSTAT?").split(
-            ",")[PACKET_STATUS_INDEX]
-        return _PROCESS_STATES[packet_status]
-
-    def disconnect(self):
-        """ Disconnect the Anritsu box from test PC
-
-        Args:
-          None
-
-        Returns:
-            None
-        """
-        # no need to # exit smart studio application
-        # self.close_smartstudio()
-        self._sock.close()
-
-    def machine_reboot(self):
-        """ Reboots the Anritsu Machine
-
-        Args:
-          None
-
-        Returns:
-            None
-        """
-        self.send_command("REBOOT")
-
-    def save_sequence_log(self, fileName):
-        """ Saves the Anritsu Sequence logs to file
-
-        Args:
-          fileName: log file name
-
-        Returns:
-            None
-        """
-        cmd = 'SAVESEQLOG "{}"'.format(fileName)
-        self.send_command(cmd)
-
-    def clear_sequence_log(self):
-        """ Clears the Anritsu Sequence logs
-
-        Args:
-          None
-
-        Returns:
-            None
-        """
-        self.send_command("CLEARSEQLOG")
-
-    def save_message_log(self, fileName):
-        """ Saves the Anritsu Message logs to file
-
-        Args:
-          fileName: log file name
-
-        Returns:
-            None
-        """
-        cmd = 'SAVEMSGLOG "{}"'.format(fileName)
-        self.send_command(cmd)
-
-    def clear_message_log(self):
-        """ Clears the Anritsu Message logs
-
-        Args:
-          None
-
-        Returns:
-            None
-        """
-        self.send_command("CLEARMSGLOG")
-
-    def save_trace_log(self, fileName, fileType, overwrite, start, end):
-        """ Saves the Anritsu Trace logs
-
-        Args:
-          fileName: log file name
-          fileType: file type (BINARY, TEXT, H245,PACKET, CPLABE)
-          overwrite: whether to over write
-          start: starting trace number
-          end: ending trace number
-
-        Returns:
-            None
-        """
-        cmd = 'SAVETRACELOG "{}",{},{},{},{}'.format(fileName, fileType,
-                                                     overwrite, start, end)
-        self.send_command(cmd)
-
-    def send_cmas_lte_wcdma(self, serialNo, messageID, warningMessage):
-        """ Sends a CMAS message
-
-        Args:
-          serialNo: serial number of CMAS message
-          messageID: CMAS message ID
-          warningMessage:  CMAS Warning message
-
-        Returns:
-            None
-        """
-        cmd = ('PWSSENDWM 3GPP,"BtsNo=1&WarningSystem=CMAS&SerialNo={}'
-               '&MessageID={}&wm={}"').format(serialNo, messageID,
-                                              warningMessage)
-        self.send_command(cmd)
-
-    def send_etws_lte_wcdma(self, serialNo, messageID, warningType,
-                            warningMessage, userAlertenable, popUpEnable):
-        """ Sends a ETWS message
-
-        Args:
-          serialNo: serial number of CMAS message
-          messageID: CMAS message ID
-          warningMessage:  CMAS Warning message
-
-        Returns:
-            None
-        """
-        cmd = (
-            'PWSSENDWM 3GPP,"BtsNo=1&WarningSystem=ETWS&SerialNo={}&'
-            'Primary=ON&PrimaryMessageID={}&Secondary=ON&SecondaryMessageID={}'
-            '&WarningType={}&wm={}&UserAlert={}&Popup={}&dcs=0x10&LanguageCode=en"'
-        ).format(serialNo, messageID, messageID, warningType, warningMessage,
-                 userAlertenable, popUpEnable)
-        self.send_command(cmd)
-
-    def send_cmas_etws_cdma1x(self, message_id, service_category, alert_ext,
-                              response_type, severity, urgency, certainty):
-        """ Sends a CMAS/ETWS message on CDMA 1X
-
-        Args:
-          serviceCategory: service category of alert
-          messageID: message ID
-          alertText: Warning message
-
-        Returns:
-            None
-        """
-        cmd = (
-            'PWSSENDWM 3GPP2,"BtsNo=1&ServiceCategory={}&MessageID={}&AlertText={}&'
-            'CharSet=ASCII&ResponseType={}&Severity={}&Urgency={}&Certainty={}"'
-        ).format(service_category, message_id, alert_ext, response_type,
-                 severity, urgency, certainty)
-        self.send_command(cmd)
-
-    @property
-    def csfb_type(self):
-        """ Gets the current CSFB type
-
-        Args:
-            None
-
-        Returns:
-            current CSFB type
-        """
-        return self.send_query("SIMMODELEX? CSFB")
-
-    @csfb_type.setter
-    def csfb_type(self, csfb_type):
-        """ sets the CSFB type
-        Args:
-            csfb_type: CSFB type
-
-        Returns:
-            None
-        """
-        if not isinstance(csfb_type, CsfbType):
-            raise ValueError('The parameter should be of type "CsfbType" ')
-        cmd = "SIMMODELEX CSFB," + csfb_type.value
-        self.send_command(cmd)
-
-    @property
-    def csfb_return_to_eutran(self):
-        """ Gets the current return to EUTRAN status
-
-        Args:
-            None
-
-        Returns:
-            current return to EUTRAN status
-        """
-        return self.send_query("SIMMODELEX? RETEUTRAN")
-
-    @csfb_return_to_eutran.setter
-    def csfb_return_to_eutran(self, enable):
-        """ sets the return to EUTRAN feature
-        Args:
-            enable: enable/disable return to EUTRAN feature
-
-        Returns:
-            None
-        """
-        if not isinstance(enable, ReturnToEUTRAN):
-            raise ValueError(
-                'The parameter should be of type "ReturnToEUTRAN"')
-        cmd = "SIMMODELEX RETEUTRAN," + enable.value
-        self.send_command(cmd)
-
-    def set_packet_preservation(self):
-        """ Set packet state to Preservation
-
-        Args:
-            None
-
-        Returns:
-            None
-        """
-        cmd = "OPERATEPACKET PRESERVATION"
-        self.send_command(cmd)
-
-    def set_packet_dormant(self):
-        """ Set packet state to Dormant
-
-        Args:
-            None
-
-        Returns:
-            None
-        """
-        cmd = "OPERATEPACKET DORMANT"
-        self.send_command(cmd)
-
-    def get_ue_identity(self, identity_type):
-        """ Get the UE identity IMSI, IMEI, IMEISV
-
-        Args:
-            identity_type : IMSI/IMEI/IMEISV
-
-        Returns:
-            IMSI/IMEI/IMEISV value
-        """
-        bts, rat = self.get_camping_cell()
-        if rat == BtsTechnology.LTE.value:
-            identity_request = TriggerMessageIDs.IDENTITY_REQUEST_LTE.value
-            if identity_type == UEIdentityType.IMSI:
-                userdata = IMSI_READ_USERDATA_LTE
-            elif identity_type == UEIdentityType.IMEI:
-                userdata = IMEI_READ_USERDATA_LTE
-            elif identity_type == UEIdentityType.IMEISV:
-                userdata = IMEISV_READ_USERDATA_LTE
-            else:
-                return None
-        elif rat == BtsTechnology.WCDMA.value:
-            identity_request = TriggerMessageIDs.IDENTITY_REQUEST_WCDMA.value
-            if identity_type == UEIdentityType.IMSI:
-                userdata = IMSI_READ_USERDATA_WCDMA
-            elif identity_type == UEIdentityType.IMEI:
-                userdata = IMEI_READ_USERDATA_WCDMA
-            elif identity_type == UEIdentityType.IMEISV:
-                userdata = IMEISV_READ_USERDATA_WCDMA
-            else:
-                return None
-        elif rat == BtsTechnology.GSM.value:
-            identity_request = TriggerMessageIDs.IDENTITY_REQUEST_GSM.value
-            if identity_type == UEIdentityType.IMSI:
-                userdata = IMSI_READ_USERDATA_GSM
-            elif identity_type == UEIdentityType.IMEI:
-                userdata = IMEI_READ_USERDATA_GSM
-            elif identity_type == UEIdentityType.IMEISV:
-                userdata = IMEISV_READ_USERDATA_GSM
-            else:
-                return None
-        else:
-            return None
-
-        self.send_command("TMMESSAGEMODE {},USERDATA".format(identity_request))
-        time.sleep(SETTLING_TIME)
-        self.send_command("TMUSERDATA {}, {}, {}".format(
-            identity_request, userdata, IDENTITY_REQ_DATA_LEN))
-        time.sleep(SETTLING_TIME)
-        self.send_command("TMSENDUSERMSG {}".format(identity_request))
-        time.sleep(WAIT_TIME_IDENTITY_RESPONSE)
-        # Go through sequence log and find the identity response message
-        target = '"{}"'.format(identity_type.value)
-        seqlog = self.send_query("SEQLOG?").split(",")
-        while (target not in seqlog):
-            index = int(seqlog[0]) - 1
-            if index < SEQ_LOG_MESSAGE_START_INDEX:
-                self.log.error("Can not find " + target)
-                return None
-            seqlog = self.send_query("SEQLOG? %d" % index).split(",")
-        return (seqlog[-1])
-
-    def trigger_ue_capability_enquiry(self, requested_bands):
-        """ Triggers LTE RRC UE capability enquiry from callbox.
-
-        Args:
-            requested_bands: User data in hex format
-        """
-        self.set_trigger_message_mode(TriggerMessageIDs.UE_CAPABILITY_ENQUIRY)
-        time.sleep(SETTLING_TIME)
-        self.set_data_of_trigger_message(
-            TriggerMessageIDs.UE_CAPABILITY_ENQUIRY, requested_bands)
-        time.sleep(SETTLING_TIME)
-        self.send_trigger_message(TriggerMessageIDs.UE_CAPABILITY_ENQUIRY)
-        time.sleep(SETTLING_TIME)
-
-    def get_measured_pusch_power(self):
-        """ Queries the PUSCH power.
-
-        Returns:
-            A string indicating PUSCH power in each input port.
-        """
-        return self.send_query("MONITOR? UL_PUSCH")
-
-    def select_usim(self, usim):
-        """ Select pre-defined Anritsu USIM models
-
-        Args:
-            usim: any of P0035Bx, P0135Ax, P0250Ax, P0260Ax
-
-        Returns:
-            None
-        """
-        cmd = "SELECTUSIM {}".format(usim)
-        self.send_command(cmd)
-
-
-class _AnritsuTestCases(object):
-    '''Class to interact with the MD8475 supported test procedures '''
-    def __init__(self, anritsu):
-        self._anritsu = anritsu
-        self.log = anritsu.log
-
-    @property
-    def procedure(self):
-        """ Gets the current Test Procedure type
-
-        Args:
-            None
-
-        Returns:
-            One of TestProcedure type values
-        """
-        return self._anritsu.send_query("TESTPROCEDURE?")
-
-    @procedure.setter
-    def procedure(self, procedure):
-        """ sets the Test Procedure type
-        Args:
-            procedure: One of TestProcedure type values
-
-        Returns:
-            None
-        """
-        if not isinstance(procedure, TestProcedure):
-            raise ValueError(
-                'The parameter should be of type "TestProcedure" ')
-        cmd = "TESTPROCEDURE " + procedure.value
-        self._anritsu.send_command(cmd)
-
-    @property
-    def bts_direction(self):
-        """ Gets the current Test direction
-
-         Args:
-            None
-
-        Returns:
-            Current Test direction eg:BTS2,BTS1
-        """
-        return self._anritsu.send_query("TESTBTSDIRECTION?")
-
-    @bts_direction.setter
-    def bts_direction(self, direction):
-        """ sets the Test direction  eg: BTS1 to BTS2 '''
-
-        Args:
-            direction: tuple (from-bts,to_bts) of type BtsNumber
-
-        Returns:
-            None
-        """
-        if not isinstance(direction, tuple) or len(direction) != 2:
-            raise ValueError("Pass a tuple with two items")
-        from_bts, to_bts = direction
-        if (isinstance(from_bts, BtsNumber) and isinstance(to_bts, BtsNumber)):
-            cmd = "TESTBTSDIRECTION {},{}".format(from_bts.value, to_bts.value)
-            self._anritsu.send_command(cmd)
-        else:
-            raise ValueError(' The parameters should be of type "BtsNumber" ')
-
-    @property
-    def registration_timeout(self):
-        """ Gets the current Test registration timeout
-
-        Args:
-            None
-
-        Returns:
-            Current test registration timeout value
-        """
-        return self._anritsu.send_query("TESTREGISTRATIONTIMEOUT?")
-
-    @registration_timeout.setter
-    def registration_timeout(self, timeout_value):
-        """ sets the Test registration timeout value
-        Args:
-            timeout_value: test registration timeout value
-
-        Returns:
-            None
-        """
-        cmd = "TESTREGISTRATIONTIMEOUT " + str(timeout_value)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def power_control(self):
-        """ Gets the power control enabled/disabled status for test case
-
-        Args:
-            None
-
-        Returns:
-            current power control enabled/disabled status
-        """
-        return self._anritsu.send_query("TESTPOWERCONTROL?")
-
-    @power_control.setter
-    def power_control(self, enable):
-        """ Sets the power control enabled/disabled status for test case
-
-        Args:
-            enable:  enabled/disabled
-
-        Returns:
-            None
-        """
-        if not isinstance(enable, TestPowerControl):
-            raise ValueError(' The parameter should be of type'
-                             ' "TestPowerControl" ')
-        cmd = "TESTPOWERCONTROL " + enable.value
-        self._anritsu.send_command(cmd)
-
-    @property
-    def measurement_LTE(self):
-        """ Checks measurement status for LTE test case
-
-        Args:
-            None
-
-        Returns:
-            Enabled/Disabled
-        """
-        return self._anritsu.send_query("TESTMEASUREMENT? LTE")
-
-    @measurement_LTE.setter
-    def measurement_LTE(self, enable):
-        """ Sets the measurement enabled/disabled status for LTE test case
-
-        Args:
-            enable:  enabled/disabled
-
-        Returns:
-            None
-        """
-        if not isinstance(enable, TestMeasurement):
-            raise ValueError(' The parameter should be of type'
-                             ' "TestMeasurement" ')
-        cmd = "TESTMEASUREMENT LTE," + enable.value
-        self._anritsu.send_command(cmd)
-
-    @property
-    def measurement_WCDMA(self):
-        """ Checks measurement status for WCDMA test case
-
-        Args:
-            None
-
-        Returns:
-            Enabled/Disabled
-        """
-        return self._anritsu.send_query("TESTMEASUREMENT? WCDMA")
-
-    @measurement_WCDMA.setter
-    def measurement_WCDMA(self, enable):
-        """ Sets the measurement enabled/disabled status for WCDMA test case
-
-        Args:
-            enable:  enabled/disabled
-
-        Returns:
-            None
-        """
-        if not isinstance(enable, TestMeasurement):
-            raise ValueError(' The parameter should be of type'
-                             ' "TestMeasurement" ')
-        cmd = "TESTMEASUREMENT WCDMA," + enable.value
-        self._anritsu.send_command(cmd)
-
-    @property
-    def measurement_TDSCDMA(self):
-        """ Checks measurement status for TDSCDMA test case
-
-        Args:
-            None
-
-        Returns:
-            Enabled/Disabled
-        """
-        return self._anritsu.send_query("TESTMEASUREMENT? TDSCDMA")
-
-    @measurement_TDSCDMA.setter
-    def measurement_WCDMA(self, enable):
-        """ Sets the measurement enabled/disabled status for TDSCDMA test case
-
-        Args:
-            enable:  enabled/disabled
-
-        Returns:
-            None
-        """
-        if not isinstance(enable, TestMeasurement):
-            raise ValueError(' The parameter should be of type'
-                             ' "TestMeasurement" ')
-        cmd = "TESTMEASUREMENT TDSCDMA," + enable.value
-        self._anritsu.send_command(cmd)
-
-    def set_pdn_targeteps(self, pdn_order, pdn_number=1):
-        """ Sets PDN to connect as a target when performing the
-           test case for packet handover
-
-        Args:
-            pdn_order:  PRIORITY/USER
-            pdn_number: Target PDN number
-
-        Returns:
-            None
-        """
-        cmd = "TESTPDNTARGETEPS " + pdn_order
-        if pdn_order == "USER":
-            cmd = cmd + "," + str(pdn_number)
-        self._anritsu.send_command(cmd)
-
-
-class _BaseTransceiverStation(object):
-    '''Class to interact different BTS supported by MD8475 '''
-    def __init__(self, anritsu, btsnumber):
-        if not isinstance(btsnumber, BtsNumber):
-            raise ValueError(' The parameter should be of type "BtsNumber" ')
-        self._bts_number = btsnumber.value
-        self._anritsu = anritsu
-        self.log = anritsu.log
-
-    @property
-    def output_level(self):
-        """ Gets the Downlink power of the cell
-
-        Args:
-            None
-
-        Returns:
-            DL Power level
-        """
-        cmd = "OLVL? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @output_level.setter
-    def output_level(self, level):
-        """ Sets the Downlink power of the cell
-
-        Args:
-            level: Power level
-
-        Returns:
-            None
-        """
-        counter = 1
-        while float(level) != float(self.output_level):
-            if counter > 3:
-                raise AnritsuError("Fail to set output level in 3 tries!")
-            cmd = "OLVL {},{}".format(level, self._bts_number)
-            self._anritsu.send_command(cmd)
-            counter += 1
-            time.sleep(1)
-
-    @property
-    def input_level(self):
-        """ Gets the reference power of the cell
-
-        Args:
-            None
-
-        Returns:
-            Reference Power level
-        """
-        cmd = "RFLVL? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @input_level.setter
-    def input_level(self, level):
-        """ Sets the reference power of the cell
-
-        Args:
-            level: Power level
-
-        Returns:
-            None
-        """
-        counter = 1
-        while float(level) != float(self.input_level):
-            if counter > 3:
-                raise AnritsuError("Fail to set intput level in 3 tries!")
-            cmd = "RFLVL {},{}".format(level, self._bts_number)
-            self._anritsu.send_command(cmd)
-            counter += 1
-            time.sleep(1)
-
-    @property
-    def band(self):
-        """ Gets the Band of the cell
-
-        Args:
-            None
-
-        Returns:
-            Cell band
-        """
-        cmd = "BAND? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @band.setter
-    def band(self, band):
-        """ Sets the Band of the cell
-
-        Args:
-            band: Band of the cell
-
-        Returns:
-            None
-        """
-        cmd = "BAND {},{}".format(band, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def transmode(self):
-        """ Gets the Transmission Mode of the cell
-
-        Args:
-            None
-
-        Returns:
-            Transmission mode
-        """
-        cmd = "TRANSMODE? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @transmode.setter
-    def transmode(self, tm_mode):
-        """ Sets the TM of the cell
-
-        Args:
-            TM: TM of the cell
-
-        Returns:
-            None
-        """
-        cmd = "TRANSMODE {},{}".format(tm_mode, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def duplex_mode(self):
-        """ Gets the Duplex Mode of the cell
-
-        Args:
-            None
-
-        Returns:
-            Duplex mode
-        """
-        cmd = "DUPLEXMODE? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @duplex_mode.setter
-    def duplex_mode(self, mode):
-        """ Sets the duplex mode for the cell
-
-        Args:
-            mode: string indicating FDD or TDD
-
-        Returns:
-            None
-        """
-        cmd = "DUPLEXMODE {},{}".format(mode, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def uldl_configuration(self):
-        """ Gets the UL/DL pattern configuration for TDD bands
-
-        Args:
-            None
-
-        Returns:
-            Configuration number
-        """
-        cmd = "ULDLCONFIGURATION? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @uldl_configuration.setter
-    def uldl_configuration(self, configuration):
-        """ Sets the UL/DL pattern configuration for TDD bands
-
-        Args:
-            configuration: configuration number, [ 0, 6 ] inclusive
-
-        Returns:
-            None
-
-        Raises:
-            ValueError: Frame structure has to be [ 0, 6 ] inclusive
-        """
-        if configuration not in range(0, 7):
-            raise ValueError("The frame structure configuration has to be a "
-                             "number between 0 and 6 inclusive")
-
-        cmd = "ULDLCONFIGURATION {},{}".format(configuration, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def cfi(self):
-        """ Gets the Control Format Indicator for this base station.
-
-        Args:
-            None
-
-        Returns:
-            The CFI number.
-        """
-        cmd = "CFI? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @cfi.setter
-    def cfi(self, cfi):
-        """ Sets the Control Format Indicator for this base station.
-
-        Args:
-            cfi: one of BESTEFFORT, AUTO, 1, 2 or 3.
-
-        Returns:
-            None
-
-        Raises:
-            ValueError: if cfi's value is invalid
-        """
-
-        cfi = str(cfi)
-
-        valid_values = {'BESTEFFORT', 'AUTO', '1', '2', '3'}
-        if cfi not in valid_values:
-            raise ValueError('Valid values for CFI are %r' % valid_values)
-
-        cmd = "CFI {},{}".format(cfi, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def paging_duration(self):
-        """ Gets the paging cycle duration for this base station.
-
-        Args:
-            None
-
-        Returns:
-            The paging cycle duration in milliseconds.
-        """
-        cmd = "PCYCLE? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @paging_duration.setter
-    def paging_duration(self, duration):
-        """ Sets the paging cycle duration for this base station.
-
-        Args:
-            duration: the paging cycle duration in milliseconds.
-
-        Returns:
-            None
-
-        Raises:
-            ValueError: if duration's value is invalid
-        """
-
-        duration = int(duration)
-
-        valid_values = {320, 640, 1280, 2560}
-        if duration not in valid_values:
-            raise ValueError('Valid values for the paging cycle duration are '
-                             '%r.' % valid_values)
-
-        cmd = "PCYCLE {},{}".format(duration, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def phich_resource(self):
-        """ Gets the PHICH Resource setting for this base station.
-
-        Args:
-            None
-
-        Returns:
-            The PHICH Resource setting.
-        """
-        cmd = "PHICHRESOURCE? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @phich_resource.setter
-    def phich_resource(self, phich):
-        """ Sets the PHICH Resource setting for this base station.
-
-        Args:
-            phich: one of 1/6, 1/2, 1, 2.
-
-        Returns:
-            None
-
-        Raises:
-            ValueError: if phich's value is invalid
-        """
-
-        phich = str(phich)
-
-        valid_values = ['1/6', '1/2', '1', '2']
-        if phich not in valid_values:
-            raise ValueError('Valid values for PHICH Resource are %r' %
-                             valid_values)
-
-        cmd = "PHICHRESOURCE {},{}".format(phich, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def tdd_special_subframe(self):
-        """ Gets SPECIALSUBFRAME of cell.
-
-        Args:
-            None
-
-        Returns:
-            tdd_special_subframe: integer between 0,9 inclusive
-        """
-        cmd = "SPECIALSUBFRAME? " + self._bts_number
-        tdd_special_subframe = int(self._anritsu.send_query(cmd))
-        return tdd_special_subframe
-
-    @tdd_special_subframe.setter
-    def tdd_special_subframe(self, tdd_special_subframe):
-        """ Sets SPECIALSUBFRAME of cell.
-
-        Args:
-            tdd_special_subframe: int between 0,9 inclusive
-
-        Returns:
-            None
-
-        Raises:
-            ValueError: tdd_special_subframe has to be between 0,9 inclusive
-        """
-        if tdd_special_subframe not in range(0, 10):
-            raise ValueError("The special subframe config is not [0,9]")
-        cmd = "SPECIALSUBFRAME {},{}".format(tdd_special_subframe,
-                                             self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def dl_antenna(self):
-        """ Gets the DL ANTENNA count of the cell
-
-        Args:
-            None
-
-        Returns:
-            No of DL Antenna
-        """
-        cmd = "ANTENNAS? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @dl_antenna.setter
-    def dl_antenna(self, num_antenna):
-        """ Sets the DL ANTENNA of the cell
-
-        Args:
-            c: DL ANTENNA of the cell
-
-        Returns:
-            None
-        """
-        cmd = "ANTENNAS {},{}".format(num_antenna, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def bandwidth(self):
-        """ Gets the channel bandwidth of the cell
-
-        Args:
-            None
-
-        Returns:
-            channel bandwidth
-        """
-        cmd = "BANDWIDTH? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @bandwidth.setter
-    def bandwidth(self, bandwidth):
-        """ Sets the channel bandwidth of the cell
-
-        Args:
-            bandwidth: channel bandwidth  of the cell
-
-        Returns:
-            None
-        """
-        if not isinstance(bandwidth, BtsBandwidth):
-            raise ValueError(' The parameter should be of type "BtsBandwidth"')
-        cmd = "BANDWIDTH {},{}".format(bandwidth.value, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def dl_bandwidth(self):
-        """ Gets the downlink bandwidth of the cell
-
-        Args:
-            None
-
-        Returns:
-            downlink bandwidth
-        """
-        cmd = "DLBANDWIDTH? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @dl_bandwidth.setter
-    def dl_bandwidth(self, bandwidth):
-        """ Sets the downlink bandwidth of the cell
-
-        Args:
-            bandwidth: downlink bandwidth of the cell
-
-        Returns:
-            None
-        """
-        if not isinstance(bandwidth, BtsBandwidth):
-            raise ValueError(' The parameter should be of type "BtsBandwidth"')
-        cmd = "DLBANDWIDTH {},{}".format(bandwidth.value, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def ul_bandwidth(self):
-        """ Gets the uplink bandwidth of the cell
-
-        Args:
-            None
-
-        Returns:
-            uplink bandwidth
-        """
-        cmd = "ULBANDWIDTH? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @ul_bandwidth.setter
-    def ul_bandwidth(self, bandwidth):
-        """ Sets the uplink bandwidth of the cell
-
-        Args:
-            bandwidth: uplink bandwidth of the cell
-
-        Returns:
-            None
-        """
-        if not isinstance(bandwidth, BtsBandwidth):
-            raise ValueError(
-                ' The parameter should be of type "BtsBandwidth" ')
-        cmd = "ULBANDWIDTH {},{}".format(bandwidth.value, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def packet_rate(self):
-        """ Gets the packet rate of the cell
-
-        Args:
-            None
-
-        Returns:
-            packet rate
-        """
-        cmd = "PACKETRATE? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @packet_rate.setter
-    def packet_rate(self, packetrate):
-        """ Sets the packet rate of the cell
-
-        Args:
-            packetrate: packet rate of the cell
-
-        Returns:
-            None
-        """
-        if not isinstance(packetrate, BtsPacketRate):
-            raise ValueError(' The parameter should be of type'
-                             ' "BtsPacketRate" ')
-        cmd = "PACKETRATE {},{}".format(packetrate.value, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def ul_windowsize(self):
-        """ Gets the uplink window size of the cell
-
-        Args:
-            None
-
-        Returns:
-            uplink window size
-        """
-        cmd = "ULWINSIZE? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @ul_windowsize.setter
-    def ul_windowsize(self, windowsize):
-        """ Sets the uplink window size of the cell
-
-        Args:
-            windowsize: uplink window size of the cell
-
-        Returns:
-            None
-        """
-        if not isinstance(windowsize, BtsPacketWindowSize):
-            raise ValueError(' The parameter should be of type'
-                             ' "BtsPacketWindowSize" ')
-        cmd = "ULWINSIZE {},{}".format(windowsize.value, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def dl_windowsize(self):
-        """ Gets the downlink window size of the cell
-
-        Args:
-            None
-
-        Returns:
-            downlink window size
-        """
-        cmd = "DLWINSIZE? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @dl_windowsize.setter
-    def dl_windowsize(self, windowsize):
-        """ Sets the downlink window size of the cell
-
-        Args:
-            windowsize: downlink window size of the cell
-
-        Returns:
-            None
-        """
-        if not isinstance(windowsize, BtsPacketWindowSize):
-            raise ValueError(' The parameter should be of type'
-                             ' "BtsPacketWindowSize" ')
-        cmd = "DLWINSIZE {},{}".format(windowsize.value, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def service_state(self):
-        """ Gets the service state of BTS
-
-        Args:
-            None
-
-        Returns:
-            service state IN/OUT
-        """
-        cmd = "OUTOFSERVICE? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @service_state.setter
-    def service_state(self, service_state):
-        """ Sets the service state of BTS
-
-        Args:
-            service_state: service state of BTS , IN/OUT
-
-        Returns:
-            None
-        """
-        if not isinstance(service_state, BtsServiceState):
-            raise ValueError(' The parameter should be of type'
-                             ' "BtsServiceState" ')
-        cmd = "OUTOFSERVICE {},{}".format(service_state.value,
-                                          self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def cell_barred(self):
-        """ Gets the Cell Barred state of the cell
-
-        Args:
-            None
-
-        Returns:
-            one of BtsCellBarred value
-        """
-        cmd = "CELLBARRED?" + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @cell_barred.setter
-    def cell_barred(self, barred_option):
-        """ Sets the Cell Barred state of the cell
-
-        Args:
-            barred_option: Cell Barred state of the cell
-
-        Returns:
-            None
-        """
-        if not isinstance(barred_option, BtsCellBarred):
-            raise ValueError(' The parameter should be of type'
-                             ' "BtsCellBarred" ')
-        cmd = "CELLBARRED {},{}".format(barred_option.value, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def accessclass_barred(self):
-        """ Gets the Access Class Barred state of the cell
-
-        Args:
-            None
-
-        Returns:
-            one of BtsAccessClassBarred value
-        """
-        cmd = "ACBARRED? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @accessclass_barred.setter
-    def accessclass_barred(self, barred_option):
-        """ Sets the Access Class Barred state of the cell
-
-        Args:
-            barred_option: Access Class Barred state of the cell
-
-        Returns:
-            None
-        """
-        if not isinstance(barred_option, BtsAccessClassBarred):
-            raise ValueError(' The parameter should be of type'
-                             ' "BtsAccessClassBarred" ')
-        cmd = "ACBARRED {},{}".format(barred_option.value, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def lteemergency_ac_barred(self):
-        """ Gets the LTE emergency Access Class Barred state of the cell
-
-        Args:
-            None
-
-        Returns:
-            one of BtsLteEmergencyAccessClassBarred value
-        """
-        cmd = "LTEEMERGENCYACBARRED? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @lteemergency_ac_barred.setter
-    def lteemergency_ac_barred(self, barred_option):
-        """ Sets the LTE emergency Access Class Barred state of the cell
-
-        Args:
-            barred_option: Access Class Barred state of the cell
-
-        Returns:
-            None
-        """
-        if not isinstance(barred_option, BtsLteEmergencyAccessClassBarred):
-            raise ValueError(' The parameter should be of type'
-                             ' "BtsLteEmergencyAccessClassBarred" ')
-        cmd = "LTEEMERGENCYACBARRED {},{}".format(barred_option.value,
-                                                  self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def mcc(self):
-        """ Gets the MCC of the cell
-
-        Args:
-            None
-
-        Returns:
-            MCC of the cell
-        """
-        cmd = "MCC? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @mcc.setter
-    def mcc(self, mcc_code):
-        """ Sets the MCC of the cell
-
-        Args:
-            mcc_code: MCC of the cell
-
-        Returns:
-            None
-        """
-        cmd = "MCC {},{}".format(mcc_code, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def mnc(self):
-        """ Gets the MNC of the cell
-
-        Args:
-            None
-
-        Returns:
-            MNC of the cell
-        """
-        cmd = "MNC? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @mnc.setter
-    def mnc(self, mnc_code):
-        """ Sets the MNC of the cell
-
-        Args:
-            mnc_code: MNC of the cell
-
-        Returns:
-            None
-        """
-        cmd = "MNC {},{}".format(mnc_code, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def nw_fullname_enable(self):
-        """ Gets the network full name enable status
-
-        Args:
-            None
-
-        Returns:
-            one of BtsNwNameEnable value
-        """
-        cmd = "NWFNAMEON? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @nw_fullname_enable.setter
-    def nw_fullname_enable(self, enable):
-        """ Sets the network full name enable status
-
-        Args:
-            enable: network full name enable status
-
-        Returns:
-            None
-        """
-        if not isinstance(enable, BtsNwNameEnable):
-            raise ValueError(' The parameter should be of type'
-                             ' "BtsNwNameEnable" ')
-        cmd = "NWFNAMEON {},{}".format(enable.value, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def nw_fullname(self):
-        """ Gets the network full name
-
-        Args:
-            None
-
-        Returns:
-            Network fulll name
-        """
-        cmd = "NWFNAME? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @nw_fullname.setter
-    def nw_fullname(self, fullname):
-        """ Sets the network full name
-
-        Args:
-            fullname: network full name
-
-        Returns:
-            None
-        """
-        cmd = "NWFNAME {},{}".format(fullname, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def nw_shortname_enable(self):
-        """ Gets the network short name enable status
-
-        Args:
-            None
-
-        Returns:
-            one of BtsNwNameEnable value
-        """
-        cmd = "NWSNAMEON? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @nw_shortname_enable.setter
-    def nw_shortname_enable(self, enable):
-        """ Sets the network short name enable status
-
-        Args:
-            enable: network short name enable status
-
-        Returns:
-            None
-        """
-        if not isinstance(enable, BtsNwNameEnable):
-            raise ValueError(' The parameter should be of type'
-                             ' "BtsNwNameEnable" ')
-        cmd = "NWSNAMEON {},{}".format(enable.value, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def nw_shortname(self):
-        """ Gets the network short name
-
-        Args:
-            None
-
-        Returns:
-            Network short name
-        """
-        cmd = "NWSNAME? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @nw_shortname.setter
-    def nw_shortname(self, shortname):
-        """ Sets the network short name
-
-        Args:
-            shortname: network short name
-
-        Returns:
-            None
-        """
-        cmd = "NWSNAME {},{}".format(shortname, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    def apply_parameter_changes(self):
-        """ apply the parameter changes at run time
-
-        Args:
-            None
-
-        Returns:
-            None
-        """
-        cmd = "APPLYPARAM"
-        self._anritsu.send_command(cmd)
-
-    @property
-    def wcdma_ctch(self):
-        """ Gets the WCDMA CTCH enable/disable status
-
-        Args:
-            None
-
-        Returns:
-            one of CTCHSetup values
-        """
-        cmd = "CTCHPARAMSETUP? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @wcdma_ctch.setter
-    def wcdma_ctch(self, enable):
-        """ Sets the WCDMA CTCH enable/disable status
-
-        Args:
-            enable: WCDMA CTCH enable/disable status
-
-        Returns:
-            None
-        """
-        cmd = "CTCHPARAMSETUP {},{}".format(enable.value, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def lac(self):
-        """ Gets the Location Area Code of the cell
-
-        Args:
-            None
-
-        Returns:
-            LAC value
-        """
-        cmd = "LAC? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @lac.setter
-    def lac(self, lac):
-        """ Sets the Location Area Code of the cell
-
-        Args:
-            lac: Location Area Code of the cell
-
-        Returns:
-            None
-        """
-        cmd = "LAC {},{}".format(lac, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def rac(self):
-        """ Gets the Routing Area Code of the cell
-
-        Args:
-            None
-
-        Returns:
-            RAC value
-        """
-        cmd = "RAC? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @rac.setter
-    def rac(self, rac):
-        """ Sets the Routing Area Code of the cell
-
-        Args:
-            rac: Routing Area Code of the cell
-
-        Returns:
-            None
-        """
-        cmd = "RAC {},{}".format(rac, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def dl_channel(self):
-        """ Gets the downlink channel number of the cell
-
-        Args:
-            None
-
-        Returns:
-            RAC value
-        """
-        cmd = "DLCHAN? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @dl_channel.setter
-    def dl_channel(self, channel):
-        """ Sets the downlink channel number of the cell
-
-        Args:
-            channel: downlink channel number of the cell
-
-        Returns:
-            None
-        """
-        cmd = "DLCHAN {},{}".format(channel, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def dl_cc_enabled(self):
-        """ Checks if component carrier is enabled or disabled
-
-        Args:
-            None
-
-        Returns:
-            True if enabled, False if disabled
-        """
-        return (self._anritsu.send_query("TESTDLCC?" +
-                                         self._bts_number) == "ENABLE")
-
-    @dl_cc_enabled.setter
-    def dl_cc_enabled(self, enabled):
-        """ Enables or disables the component carrier
-
-        Args:
-            enabled: True if it should be enabled, False if disabled
-
-        Returns:
-            None
-        """
-        cmd = "TESTDLCC {},{}".format("ENABLE" if enabled else "DISABLE",
-                                      self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def sector1_mcc(self):
-        """ Gets the sector 1 MCC of the CDMA cell
-
-        Args:
-            None
-
-        Returns:
-            sector 1 mcc
-        """
-        cmd = "S1MCC? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @sector1_mcc.setter
-    def sector1_mcc(self, mcc):
-        """ Sets the sector 1 MCC of the CDMA cell
-
-        Args:
-            mcc: sector 1 MCC of the CDMA cell
-
-        Returns:
-            None
-        """
-        cmd = "S1MCC {},{}".format(mcc, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def sector1_sid(self):
-        """ Gets the sector 1 system ID of the CDMA cell
-
-        Args:
-            None
-
-        Returns:
-            sector 1 system Id
-        """
-        cmd = "S1SID? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @sector1_sid.setter
-    def sector1_sid(self, sid):
-        """ Sets the sector 1 system ID of the CDMA cell
-
-        Args:
-            sid: sector 1 system ID of the CDMA cell
-
-        Returns:
-            None
-        """
-        cmd = "S1SID {},{}".format(sid, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def sector1_nid(self):
-        """ Gets the sector 1 network ID of the CDMA cell
-
-        Args:
-            None
-
-        Returns:
-            sector 1 network Id
-        """
-        cmd = "S1NID? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @sector1_nid.setter
-    def sector1_nid(self, nid):
-        """ Sets the sector 1 network ID of the CDMA cell
-
-        Args:
-            nid: sector 1 network ID of the CDMA cell
-
-        Returns:
-            None
-        """
-        cmd = "S1NID {},{}".format(nid, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def sector1_baseid(self):
-        """ Gets the sector 1 Base ID of the CDMA cell
-
-        Args:
-            None
-
-        Returns:
-            sector 1 Base Id
-        """
-        cmd = "S1BASEID? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @sector1_baseid.setter
-    def sector1_baseid(self, baseid):
-        """ Sets the sector 1 Base ID of the CDMA cell
-
-        Args:
-            baseid: sector 1 Base ID of the CDMA cell
-
-        Returns:
-            None
-        """
-        cmd = "S1BASEID {},{}".format(baseid, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def sector1_latitude(self):
-        """ Gets the sector 1 latitude of the CDMA cell
-
-        Args:
-            None
-
-        Returns:
-            sector 1 latitude
-        """
-        cmd = "S1LATITUDE? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @sector1_latitude.setter
-    def sector1_latitude(self, latitude):
-        """ Sets the sector 1 latitude of the CDMA cell
-
-        Args:
-            latitude: sector 1 latitude of the CDMA cell
-
-        Returns:
-            None
-        """
-        cmd = "S1LATITUDE {},{}".format(latitude, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def sector1_longitude(self):
-        """ Gets the sector 1 longitude of the CDMA cell
-
-        Args:
-            None
-
-        Returns:
-            sector 1 longitude
-        """
-        cmd = "S1LONGITUDE? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @sector1_longitude.setter
-    def sector1_longitude(self, longitude):
-        """ Sets the sector 1 longitude of the CDMA cell
-
-        Args:
-            longitude: sector 1 longitude of the CDMA cell
-
-        Returns:
-            None
-        """
-        cmd = "S1LONGITUDE {},{}".format(longitude, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def evdo_sid(self):
-        """ Gets the Sector ID of the EVDO cell
-
-        Args:
-            None
-
-        Returns:
-            Sector Id
-        """
-        cmd = "S1SECTORID? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @evdo_sid.setter
-    def evdo_sid(self, sid):
-        """ Sets the Sector ID of the EVDO cell
-
-        Args:
-            sid: Sector ID of the EVDO cell
-
-        Returns:
-            None
-        """
-        cmd = "S1SECTORID {},{}".format(sid, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def cell_id(self):
-        """ Gets the cell identity of the cell
-
-        Args:
-            None
-
-        Returns:
-            cell identity
-        """
-        cmd = "CELLID? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @cell_id.setter
-    def cell_id(self, cell_id):
-        """ Sets the cell identity of the cell
-
-        Args:
-            cell_id: cell identity of the cell
-
-        Returns:
-            None
-        """
-        cmd = "CELLID {},{}".format(cell_id, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def physical_cellid(self):
-        """ Gets the physical cell id of the cell
-
-        Args:
-            None
-
-        Returns:
-            physical cell id
-        """
-        cmd = "PHYCELLID? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @physical_cellid.setter
-    def physical_cellid(self, physical_cellid):
-        """ Sets the physical cell id of the cell
-
-        Args:
-            physical_cellid: physical cell id of the cell
-
-        Returns:
-            None
-        """
-        cmd = "PHYCELLID {},{}".format(physical_cellid, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def gsm_mcs_dl(self):
-        """ Gets the Modulation and Coding scheme (DL) of the GSM cell
-
-        Args:
-            None
-
-        Returns:
-            DL MCS
-        """
-        cmd = "DLMCS? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @gsm_mcs_dl.setter
-    def gsm_mcs_dl(self, mcs_dl):
-        """ Sets the Modulation and Coding scheme (DL) of the GSM cell
-
-        Args:
-            mcs_dl: Modulation and Coding scheme (DL) of the GSM cell
-
-        Returns:
-            None
-        """
-        cmd = "DLMCS {},{}".format(mcs_dl, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def gsm_mcs_ul(self):
-        """ Gets the Modulation and Coding scheme (UL) of the GSM cell
-
-        Args:
-            None
-
-        Returns:
-            UL MCS
-        """
-        cmd = "ULMCS? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @gsm_mcs_ul.setter
-    def gsm_mcs_ul(self, mcs_ul):
-        """ Sets the Modulation and Coding scheme (UL) of the GSM cell
-
-        Args:
-            mcs_ul:Modulation and Coding scheme (UL) of the GSM cell
-
-        Returns:
-            None
-        """
-        cmd = "ULMCS {},{}".format(mcs_ul, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def lte_scheduling_mode(self):
-        """ Gets the Scheduling mode of the LTE cell
-
-        Args:
-            None
-
-        Returns:
-            Scheduling mode
-        """
-        cmd = "SCHEDULEMODE? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @lte_scheduling_mode.setter
-    def lte_scheduling_mode(self, mode):
-        """ Sets the Scheduling mode of the LTE cell
-
-        Args:
-            mode: STATIC (default) or DYNAMIC
-
-        Returns:
-            None
-        """
-        counter = 1
-        while mode != self.lte_scheduling_mode:
-            if counter > 3:
-                raise AnritsuError("Fail to set scheduling mode in 3 tries!")
-            cmd = "SCHEDULEMODE {},{}".format(mode, self._bts_number)
-            self._anritsu.send_command(cmd)
-            counter += 1
-            time.sleep(1)
-
-    @property
-    def tbs_pattern(self):
-        """ Gets the TBS Pattern setting for the LTE cell
-
-        Args:
-            None
-
-        Returns:
-            TBS Pattern setting
-        """
-        cmd = "TBSPATTERN? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @tbs_pattern.setter
-    def tbs_pattern(self, pattern):
-        """ Sets the TBS Pattern setting for the LTE cell
-
-        Args:
-            mode: "FULLALLOCATION" or "OFF"
-
-        Returns:
-            None
-        """
-        cmd = "TBSPATTERN {}, {}".format(pattern, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def drx_connected_mode(self):
-        """ Gets the Connected DRX LTE cell parameter
-
-        Args:
-            None
-
-        Returns:
-            DRX connected mode (OFF, AUTO, MANUAL)
-        """
-        cmd = "DRXCONN? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @drx_connected_mode.setter
-    def drx_connected_mode(self, mode):
-        """  Sets the Connected DRX LTE cell parameter
-
-        Args:
-            mode: OFF, AUTO, MANUAL
-
-        Returns:
-            None
-        """
-        cmd = "DRXCONN {}, {}".format(mode, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def drx_on_duration_timer(self):
-        """ Gets the amount of PDCCH subframes to wait for data after
-            waking up from a DRX cycle
-
-        Args:
-            None
-
-        Returns:
-            DRX mode duration timer
-        """
-        cmd = "DRXDURATIONTIME? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @drx_on_duration_timer.setter
-    def drx_on_duration_timer(self, time):
-        """ Sets the amount of PDCCH subframes to wait for data after
-            waking up from a DRX cycle
-
-        Args:
-            timer: Amount of PDCCH subframes to wait for user data
-                to be transmitted
-
-        Returns:
-            None
-        """
-        cmd = "DRXDURATIONTIME PSF{}, {}".format(time, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def drx_inactivity_timer(self):
-        """ Gets the number of PDCCH subframes to wait before entering DRX mode
-
-        Args:
-            None
-
-        Returns:
-            DRX mode inactivity timer
-        """
-        cmd = "DRXINACTIVITYTIME? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @drx_inactivity_timer.setter
-    def drx_inactivity_timer(self, time):
-        """ Sets the number of PDCCH subframes to wait before entering DRX mode
-
-        Args:
-            timer: Length of the interval to wait
-
-        Returns:
-            None
-        """
-        cmd = "DRXINACTIVITYTIME PSF{}, {}".format(time, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def drx_retransmission_timer(self):
-        """ Gets the number of consecutive PDCCH subframes to wait
-        for retransmission
-
-        Args:
-            None
-
-        Returns:
-            Number of PDCCH subframes to wait for retransmission
-        """
-        cmd = "DRXRETRANSTIME? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @drx_retransmission_timer.setter
-    def drx_retransmission_timer(self, time):
-        """ Sets the number of consecutive PDCCH subframes to wait
-        for retransmission
-
-        Args:
-            time: Number of PDCCH subframes to wait
-            for retransmission
-
-        Returns:
-            None
-        """
-        cmd = "DRXRETRANSTIME PSF{}, {}".format(time, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def drx_long_cycle(self):
-        """ Gets the amount of subframes representing a DRX long cycle
-
-        Args:
-            None
-
-        Returns:
-            The amount of subframes representing one long DRX cycle.
-            One cycle consists of DRX sleep + DRX on duration
-        """
-        cmd = "DRXLONGCYCLE? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @drx_long_cycle.setter
-    def drx_long_cycle(self, time):
-        """ Sets the amount of subframes representing a DRX long cycle
-
-        Args:
-            long_cycle: The amount of subframes representing one long DRX cycle.
-                One cycle consists of DRX sleep + DRX on duration
-
-        Returns:
-            None
-        """
-        cmd = "DRXLONGCYCLE SF{}, {}".format(time, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def drx_long_cycle_offset(self):
-        """ Gets the offset used to determine long cycle starting
-        subframe
-
-        Args:
-            None
-
-        Returns:
-            Long cycle offset
-        """
-        cmd = "DRXLONGCYCLESTARTOFFSET? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @drx_long_cycle_offset.setter
-    def drx_long_cycle_offset(self, offset):
-        """ Sets the offset used to determine long cycle starting
-        subframe
-
-        Args:
-            offset: Number in range 0...(long cycle - 1)
-        """
-        cmd = "DRXLONGCYCLESTARTOFFSET {}, {}".format(offset, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def lte_mcs_dl(self):
-        """ Gets the Modulation and Coding scheme (DL) of the LTE cell
-
-        Args:
-            None
-
-        Returns:
-            DL MCS
-        """
-        cmd = "DLIMCS? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @lte_mcs_dl.setter
-    def lte_mcs_dl(self, mcs_dl):
-        """ Sets the Modulation and Coding scheme (DL) of the LTE cell
-
-        Args:
-            mcs_dl: Modulation and Coding scheme (DL) of the LTE cell
-
-        Returns:
-            None
-        """
-        cmd = "DLIMCS {},{}".format(mcs_dl, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def lte_mcs_ul(self):
-        """ Gets the Modulation and Coding scheme (UL) of the LTE cell
-
-        Args:
-            None
-
-        Returns:
-            UL MCS
-        """
-        cmd = "ULIMCS? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @lte_mcs_ul.setter
-    def lte_mcs_ul(self, mcs_ul):
-        """ Sets the Modulation and Coding scheme (UL) of the LTE cell
-
-        Args:
-            mcs_ul: Modulation and Coding scheme (UL) of the LTE cell
-
-        Returns:
-            None
-        """
-        cmd = "ULIMCS {},{}".format(mcs_ul, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def lte_dl_modulation_order(self):
-        """ Gets the DL modulation order of the LTE cell
-
-        Args:
-            None
-
-        Returns:
-            The DL modulation order
-        """
-        cmd = "DLRMC_MOD? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @lte_dl_modulation_order.setter
-    def lte_dl_modulation_order(self, order):
-        """ Sets the DL modulation order of the LTE cell
-
-        Args:
-            order: the DL modulation order of the LTE cell
-
-        Returns:
-            None
-        """
-        if isinstance(order, ModulationType):
-            order = order.value
-        cmd = "DLRMC_MOD {},{}".format(order, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def lte_ul_modulation_order(self):
-        """ Gets the UL modulation order of the LTE cell
-
-        Args:
-            None
-
-        Returns:
-            The UL modulation order
-        """
-        cmd = "ULRMC_MOD? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @lte_ul_modulation_order.setter
-    def lte_ul_modulation_order(self, order):
-        """ Sets the UL modulation order of the LTE cell
-
-        Args:
-            order: the UL modulation order of the LTE cell
-
-        Returns:
-            None
-        """
-        if isinstance(order, ModulationType):
-            order = order.value
-        cmd = "ULRMC_MOD {},{}".format(order, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def nrb_dl(self):
-        """ Gets the Downlink N Resource Block of the cell
-
-        Args:
-            None
-
-        Returns:
-            Downlink NRB
-        """
-        cmd = "DLNRB? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @nrb_dl.setter
-    def nrb_dl(self, blocks):
-        """ Sets the Downlink N Resource Block of the cell
-
-        Args:
-            blocks: Downlink N Resource Block of the cell
-
-        Returns:
-            None
-        """
-        cmd = "DLNRB {},{}".format(blocks, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def nrb_ul(self):
-        """ Gets the uplink N Resource Block of the cell
-
-        Args:
-            None
-
-        Returns:
-            uplink NRB
-        """
-        cmd = "ULNRB? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @nrb_ul.setter
-    def nrb_ul(self, blocks):
-        """ Sets the uplink N Resource Block of the cell
-
-        Args:
-            blocks: uplink N Resource Block of the cell
-
-        Returns:
-            None
-        """
-        cmd = "ULNRB {},{}".format(blocks, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def max_nrb_ul(self):
-        ul_bandwidth = self.ul_bandwidth
-        if ul_bandwidth == 'SAMEASDL':
-            ul_bandwidth = self.dl_bandwidth
-        max_nrb = MAX_NRB_FOR_BANDWIDTH.get(ul_bandwidth, None)
-        if not max_nrb:
-            raise ValueError('Could not get maximum RB allocation'
-                             'for bandwidth: {}'.format(ul_bandwidth))
-        return max_nrb
-
-    @property
-    def mimo_support(self):
-        """ Gets the maximum supported MIMO mode for the LTE bases tation.
-
-        Returns:
-            the MIMO mode as a string
-        """
-        cmd = "LTEMIMO? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @mimo_support.setter
-    def mimo_support(self, mode):
-        """ Sets the maximum supported MIMO mode for the LTE base station.
-
-        Args:
-            mode: a string or an object of the LteMimoMode class.
-        """
-
-        if isinstance(mode, LteMimoMode):
-            mode = mode.value
-
-        cmd = "LTEMIMO {},{}".format(self._bts_number, mode)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def neighbor_cell_mode(self):
-        """ Gets the neighbor cell mode
-
-        Args:
-            None
-
-        Returns:
-            current neighbor cell mode
-        """
-        cmd = "NCLIST? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @neighbor_cell_mode.setter
-    def neighbor_cell_mode(self, mode):
-        """ Sets the neighbor cell mode
-
-        Args:
-            mode: neighbor cell mode , DEFAULT/ USERDATA
-
-        Returns:
-            None
-        """
-        cmd = "NCLIST {},{}".format(mode, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    def get_neighbor_cell_type(self, system, index):
-        """ Gets the neighbor cell type
-
-        Args:
-            system: simulation model of neighbor cell
-                    LTE, WCDMA, TDSCDMA, GSM, CDMA1X,EVDO
-            index: Index of neighbor cell
-
-        Returns:
-            neighbor cell type
-        """
-        cmd = "NCTYPE? {},{},{}".format(system, index, self._bts_number)
-        return self._anritsu.send_query(cmd)
-
-    def set_neighbor_cell_type(self, system, index, cell_type):
-        """ Sets the neighbor cell type
-
-        Args:
-            system: simulation model of neighbor cell
-                   LTE, WCDMA, TDSCDMA, GSM, CDMA1X,EVDO
-            index: Index of neighbor cell
-            cell_type: cell type
-                BTS1, BTS2, BTS3, BTS4,CELLNAME, DISABLE
-
-        Returns:
-            None
-        """
-        cmd = "NCTYPE {},{},{},{}".format(system, index, cell_type,
-                                          self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    def get_neighbor_cell_name(self, system, index):
-        """ Gets the neighbor cell name
-
-        Args:
-            system: simulation model of neighbor cell
-                    LTE, WCDMA, TDSCDMA, GSM, CDMA1X,EVDO
-            index: Index of neighbor cell
-
-        Returns:
-            neighbor cell name
-        """
-        cmd = "NCCELLNAME? {},{},{}".format(system, index, self._bts_number)
-        return self._anritsu.send_query(cmd)
-
-    def set_neighbor_cell_name(self, system, index, name):
-        """ Sets the neighbor cell name
-
-        Args:
-            system: simulation model of neighbor cell
-                   LTE, WCDMA, TDSCDMA, GSM, CDMA1X,EVDO
-            index: Index of neighbor cell
-            name: cell name
-
-        Returns:
-            None
-        """
-        cmd = "NCCELLNAME {},{},{},{}".format(system, index, name,
-                                              self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    def get_neighbor_cell_mcc(self, system, index):
-        """ Gets the neighbor cell mcc
-
-        Args:
-            system: simulation model of neighbor cell
-                    LTE, WCDMA, TDSCDMA, GSM, CDMA1X,EVDO
-            index: Index of neighbor cell
-
-        Returns:
-            neighbor cell mcc
-        """
-        cmd = "NCMCC? {},{},{}".format(system, index, self._bts_number)
-        return self._anritsu.send_query(cmd)
-
-    def get_neighbor_cell_mnc(self, system, index):
-        """ Gets the neighbor cell mnc
-
-        Args:
-            system: simulation model of neighbor cell
-                    LTE, WCDMA, TDSCDMA, GSM, CDMA1X,EVDO
-            index: Index of neighbor cell
-
-        Returns:
-            neighbor cell mnc
-        """
-        cmd = "NCMNC? {},{},{}".format(system, index, self._bts_number)
-        return self._anritsu.send_query(cmd)
-
-    def get_neighbor_cell_id(self, system, index):
-        """ Gets the neighbor cell id
-
-        Args:
-            system: simulation model of neighbor cell
-                    LTE, WCDMA, TDSCDMA, GSM, CDMA1X,EVDO
-            index: Index of neighbor cell
-
-        Returns:
-            neighbor cell id
-        """
-        cmd = "NCCELLID? {},{},{}".format(system, index, self._bts_number)
-        return self._anritsu.send_query(cmd)
-
-    def get_neighbor_cell_tac(self, system, index):
-        """ Gets the neighbor cell tracking area code
-
-        Args:
-            system: simulation model of neighbor cell
-                    LTE, WCDMA, TDSCDMA, GSM, CDMA1X,EVDO
-            index: Index of neighbor cell
-
-        Returns:
-            neighbor cell tracking area code
-        """
-        cmd = "NCTAC? {},{},{}".format(system, index, self._bts_number)
-        return self._anritsu.send_query(cmd)
-
-    def get_neighbor_cell_dl_channel(self, system, index):
-        """ Gets the neighbor cell downlink channel
-
-        Args:
-            system: simulation model of neighbor cell
-                    LTE, WCDMA, TDSCDMA, GSM, CDMA1X,EVDO
-            index: Index of neighbor cell
-
-        Returns:
-            neighbor cell tracking downlink channel
-        """
-        cmd = "NCDLCHAN? {},{},{}".format(system, index, self._bts_number)
-        return self._anritsu.send_query(cmd)
-
-    def get_neighbor_cell_dl_bandwidth(self, system, index):
-        """ Gets the neighbor cell downlink bandwidth
-
-        Args:
-            system: simulation model of neighbor cell
-                    LTE, WCDMA, TDSCDMA, GSM, CDMA1X,EVDO
-            index: Index of neighbor cell
-
-        Returns:
-            neighbor cell tracking downlink bandwidth
-        """
-        cmd = "NCDLBANDWIDTH {},{},{}".format(system, index, self._bts_number)
-        return self._anritsu.send_query(cmd)
-
-    def get_neighbor_cell_pcid(self, system, index):
-        """ Gets the neighbor cell physical cell id
-
-        Args:
-            system: simulation model of neighbor cell
-                    LTE, WCDMA, TDSCDMA, GSM, CDMA1X,EVDO
-            index: Index of neighbor cell
-
-        Returns:
-            neighbor cell physical cell id
-        """
-        cmd = "NCPHYCELLID {},{},{}".format(system, index, self._bts_number)
-        return self._anritsu.send_query(cmd)
-
-    def get_neighbor_cell_lac(self, system, index):
-        """ Gets the neighbor cell location area code
-
-        Args:
-            system: simulation model of neighbor cell
-                    LTE, WCDMA, TDSCDMA, GSM, CDMA1X,EVDO
-            index: Index of neighbor cell
-
-        Returns:
-            neighbor cell location area code
-        """
-        cmd = "NCLAC {},{},{}".format(system, index, self._bts_number)
-        return self._anritsu.send_query(cmd)
-
-    def get_neighbor_cell_rac(self, system, index):
-        """ Gets the neighbor cell routing area code
-
-        Args:
-            system: simulation model of neighbor cell
-                    LTE, WCDMA, TDSCDMA, GSM, CDMA1X,EVDO
-            index: Index of neighbor cell
-
-        Returns:
-            neighbor cell routing area code
-        """
-        cmd = "NCRAC {},{},{}".format(system, index, self._bts_number)
-        return self._anritsu.send_query(cmd)
-
-    @property
-    def primary_scrambling_code(self):
-        """ Gets the primary scrambling code for WCDMA cell
-
-        Args:
-            None
-
-        Returns:
-            primary scrambling code
-        """
-        cmd = "PRISCRCODE? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @primary_scrambling_code.setter
-    def primary_scrambling_code(self, psc):
-        """ Sets the primary scrambling code for WCDMA cell
-
-        Args:
-            psc: primary scrambling code
-
-        Returns:
-            None
-        """
-        cmd = "PRISCRCODE {},{}".format(psc, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def tac(self):
-        """ Gets the Tracking Area Code of the LTE cell
-
-        Args:
-            None
-
-        Returns:
-            Tracking Area Code of the LTE cell
-        """
-        cmd = "TAC? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @tac.setter
-    def tac(self, tac):
-        """ Sets the Tracking Area Code of the LTE cell
-
-        Args:
-            tac: Tracking Area Code of the LTE cell
-
-        Returns:
-            None
-        """
-        cmd = "TAC {},{}".format(tac, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def cell(self):
-        """ Gets the current cell for BTS
-
-        Args:
-            None
-
-        Returns:
-            current cell for BTS
-        """
-        cmd = "CELLSEL? {}".format(self._bts_number)
-        return self._anritsu.send_query(cmd)
-
-    @cell.setter
-    def cell(self, cell_name):
-        """ sets the  cell for BTS
-        Args:
-            cell_name: cell name
-
-        Returns:
-            None
-        """
-        cmd = "CELLSEL {},{}".format(self._bts_number, cell_name)
-        return self._anritsu.send_command(cmd)
-
-    @property
-    def gsm_cbch(self):
-        """ Gets the GSM CBCH enable/disable status
-
-        Args:
-            None
-
-        Returns:
-            one of CBCHSetup values
-        """
-        cmd = "CBCHPARAMSETUP? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @gsm_cbch.setter
-    def gsm_cbch(self, enable):
-        """ Sets the GSM CBCH enable/disable status
-
-        Args:
-            enable: GSM CBCH enable/disable status
-
-        Returns:
-            None
-        """
-        cmd = "CBCHPARAMSETUP {},{}".format(enable.value, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def gsm_gprs_mode(self):
-        """ Gets the GSM connection mode
-
-        Args:
-            None
-
-        Returns:
-            A string indicating if connection is EGPRS, GPRS or non-GPRS
-        """
-        cmd = "GPRS? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @gsm_gprs_mode.setter
-    def gsm_gprs_mode(self, mode):
-        """ Sets the GPRS connection mode
-
-        Args:
-            mode: GPRS connection mode
-
-        Returns:
-            None
-        """
-
-        if not isinstance(mode, BtsGprsMode):
-            raise ValueError(' The parameter should be of type "BtsGprsMode"')
-        cmd = "GPRS {},{}".format(mode.value, self._bts_number)
-
-        self._anritsu.send_command(cmd)
-
-    @property
-    def gsm_slots(self):
-        """ Gets the GSM slot assignment
-
-        Args:
-            None
-
-        Returns:
-            A tuple indicating DL and UL slots.
-        """
-
-        cmd = "MLTSLTCFG? " + self._bts_number
-
-        response = self._anritsu.send_query(cmd)
-        split_response = response.split(',')
-
-        if not len(split_response) == 2:
-            raise ValueError(response)
-
-        return response[0], response[1]
-
-    @gsm_slots.setter
-    def gsm_slots(self, slots):
-        """ Sets the number of downlink / uplink slots for GSM
-
-        Args:
-            slots: a tuple containing two ints indicating (DL,UL)
-
-        Returns:
-            None
-        """
-
-        try:
-            dl, ul = slots
-            dl = int(dl)
-            ul = int(ul)
-        except:
-            raise ValueError(
-                'The parameter slot has to be a tuple containing two ints '
-                'indicating (dl,ul) slots.')
-
-        # Validate
-        if dl < 1 or ul < 1 or dl + ul > 5:
-            raise ValueError(
-                'DL and UL slots have to be >= 1 and the sum <= 5.')
-
-        cmd = "MLTSLTCFG {},{},{}".format(dl, ul, self._bts_number)
-
-        self._anritsu.send_command(cmd)
-
-
-class _VirtualPhone(object):
-    '''Class to interact with virtual phone supported by MD8475 '''
-    def __init__(self, anritsu):
-        self._anritsu = anritsu
-        self.log = anritsu.log
-
-    @property
-    def id(self):
-        """ Gets the virtual phone ID
-
-        Args:
-            None
-
-        Returns:
-            virtual phone ID
-        """
-        cmd = "VPID? "
-        return self._anritsu.send_query(cmd)
-
-    @id.setter
-    def id(self, phonenumber):
-        """ Sets the virtual phone ID
-
-        Args:
-            phonenumber: virtual phone ID
-
-        Returns:
-            None
-        """
-        cmd = "VPID {}".format(phonenumber)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def id_c2k(self):
-        """ Gets the virtual phone ID for CDMA 1x
-
-        Args:
-            None
-
-        Returns:
-            virtual phone ID
-        """
-        cmd = "VPIDC2K? "
-        return self._anritsu.send_query(cmd)
-
-    @id_c2k.setter
-    def id_c2k(self, phonenumber):
-        """ Sets the virtual phone ID for CDMA 1x
-
-        Args:
-            phonenumber: virtual phone ID
-
-        Returns:
-            None
-        """
-        cmd = "VPIDC2K {}".format(phonenumber)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def auto_answer(self):
-        """ Gets the auto answer status of virtual phone
-
-        Args:
-            None
-
-        Returns:
-            auto answer status, ON/OFF
-        """
-        cmd = "VPAUTOANSWER? "
-        return self._anritsu.send_query(cmd)
-
-    @auto_answer.setter
-    def auto_answer(self, option):
-        """ Sets the auto answer feature
-
-        Args:
-            option: tuple with two items for turning on Auto Answer
-                    (OFF or (ON, timetowait))
-
-        Returns:
-            None
-        """
-        enable = "OFF"
-        time = 5
-
-        try:
-            enable, time = option
-        except ValueError:
-            if enable != "OFF":
-                raise ValueError("Pass a tuple with two items for"
-                                 " Turning on Auto Answer")
-        cmd = "VPAUTOANSWER {},{}".format(enable.value, time)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def calling_mode(self):
-        """ Gets the calling mode of virtual phone
-
-        Args:
-            None
-
-        Returns:
-            calling mode of virtual phone
-        """
-        cmd = "VPCALLINGMODE? "
-        return self._anritsu.send_query(cmd)
-
-    @calling_mode.setter
-    def calling_mode(self, calling_mode):
-        """ Sets the calling mode of virtual phone
-
-        Args:
-            calling_mode: calling mode of virtual phone
-
-        Returns:
-            None
-        """
-        cmd = "VPCALLINGMODE {}".format(calling_mode)
-        self._anritsu.send_command(cmd)
-
-    def set_voice_off_hook(self):
-        """ Set the virtual phone operating mode to Voice Off Hook
-
-        Args:
-            None
-
-        Returns:
-            None
-        """
-        cmd = "OPERATEVPHONE 0"
-        return self._anritsu.send_command(cmd)
-
-    def set_voice_on_hook(self):
-        """ Set the virtual phone operating mode to Voice On Hook
-
-        Args:
-            None
-
-        Returns:
-            None
-        """
-        cmd = "OPERATEVPHONE 1"
-        return self._anritsu.send_command(cmd)
-
-    def set_video_off_hook(self):
-        """ Set the virtual phone operating mode to Video Off Hook
-
-        Args:
-            None
-
-        Returns:
-            None
-        """
-        cmd = "OPERATEVPHONE 2"
-        return self._anritsu.send_command(cmd)
-
-    def set_video_on_hook(self):
-        """ Set the virtual phone operating mode to Video On Hook
-
-        Args:
-            None
-
-        Returns:
-            None
-        """
-        cmd = "OPERATEVPHONE 3"
-        return self._anritsu.send_command(cmd)
-
-    def set_call_waiting(self):
-        """ Set the virtual phone operating mode to Call waiting
-
-        Args:
-            None
-
-        Returns:
-            None
-        """
-        cmd = "OPERATEVPHONE 4"
-        return self._anritsu.send_command(cmd)
-
-    @property
-    def status(self):
-        """ Gets the virtual phone status
-
-        Args:
-            None
-
-        Returns:
-            virtual phone status
-        """
-        cmd = "VPSTAT?"
-        status = self._anritsu.send_query(cmd)
-        return _VP_STATUS[status]
-
-    def sendSms(self, phoneNumber, message):
-        """ Sends the SMS data from Anritsu to UE
-
-        Args:
-            phoneNumber: sender of SMS
-            message: message text
-
-        Returns:
-            None
-        """
-        cmd = ("SENDSMS /?PhoneNumber=001122334455&Sender={}&Text={}"
-               "&DCS=00").format(phoneNumber, AnritsuUtils.gsm_encode(message))
-        return self._anritsu.send_command(cmd)
-
-    def sendSms_c2k(self, phoneNumber, message):
-        """ Sends the SMS data from Anritsu to UE (in CDMA)
-
-        Args:
-            phoneNumber: sender of SMS
-            message: message text
-
-        Returns:
-            None
-        """
-        cmd = ("C2KSENDSMS System=CDMA\&Originating_Address={}\&UserData={}"
-               ).format(phoneNumber, AnritsuUtils.cdma_encode(message))
-        return self._anritsu.send_command(cmd)
-
-    def receiveSms(self):
-        """ Receives SMS messages sent by the UE in an external application
-
-        Args:
-            None
-
-        Returns:
-            None
-        """
-        return self._anritsu.send_query("RECEIVESMS?")
-
-    def receiveSms_c2k(self):
-        """ Receives SMS messages sent by the UE(in CDMA) in an external application
-
-        Args:
-            None
-
-        Returns:
-            None
-        """
-        return self._anritsu.send_query("C2KRECEIVESMS?")
-
-    def setSmsStatusReport(self, status):
-        """ Set the Status Report value of the SMS
-
-        Args:
-            status: status code
-
-        Returns:
-            None
-        """
-        cmd = "SMSSTATUSREPORT {}".format(status)
-        return self._anritsu.send_command(cmd)
-
-
-class _PacketDataNetwork(object):
-    '''Class to configure PDN parameters'''
-    def __init__(self, anritsu, pdnnumber):
-        self._pdn_number = pdnnumber
-        self._anritsu = anritsu
-        self.log = anritsu.log
-
-    # Default Gateway Selection
-    @property
-    def pdn_DG_selection(self):
-        """ Gets the default gateway for the PDN
-
-        Args:
-          None
-
-        Returns:
-          Current UE status
-        """
-        cmd = "PDNDEFAULTGATEWAY? " + self._pdn_number
-        return self._anritsu.send_query(cmd)
-
-    @pdn_DG_selection.setter
-    def pdn_DG_selection(self, selection):
-        """ Sets the default gateway selection for the PDN
-
-        Args:
-          Selection: COMMON or USER
-
-        Returns:
-          None
-        """
-        cmd = "PDNDEFAULTGATEWAY {},{}".format(self._pdn_number, selection)
-        self._anritsu.send_command(cmd)
-
-    # PDN specific Default Gateway:
-    @property
-    def pdn_gateway_ipv4addr(self):
-        """ Gets the IPv4 address of the default gateway
-
-        Args:
-          None
-
-        Returns:
-            current UE status
-        """
-        cmd = "PDNDGIPV4? " + self._pdn_number
-        return self._anritsu.send_query(cmd)
-
-    @pdn_gateway_ipv4addr.setter
-    def pdn_gateway_ipv4addr(self, ipv4_addr):
-        """ sets the IPv4 address of the default gateway
-
-        Args:
-            ipv4_addr: IPv4 address of the default gateway
-
-        Returns:
-            None
-        """
-        cmd = "PDNDGIPV4 {},{}".format(self._pdn_number, ipv4_addr)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def pdn_gateway_ipv6addr(self):
-        """ Gets the IPv6 address of the default gateway
-
-        Args:
-          None
-
-        Returns:
-            current UE status
-        """
-        cmd = "PDNDGIPV6? " + self._pdn_number
-        return self._anritsu.send_query(cmd)
-
-    @pdn_gateway_ipv6addr.setter
-    def pdn_gateway_ipv6addr(self, ipv6_addr):
-        """ sets the IPv6 address of the default gateway
-
-        Args:
-            ipv6_addr: IPv6 address of the default gateway
-
-        Returns:
-            None
-        """
-        cmd = "PDNDGIPV6 {},{}".format(self._pdn_number, ipv6_addr)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def ue_address_iptype(self):
-        """ Gets IP type of UE for particular PDN
-
-        Args:
-            None
-
-        Returns:
-            IP type of UE for particular PDN
-        """
-        cmd = "PDNIPTYPE? " + self._pdn_number
-        return self._anritsu.send_query(cmd)
-
-    @ue_address_iptype.setter
-    def ue_address_iptype(self, ip_type):
-        """ Set IP type of UE for particular PDN
-
-        Args:
-            ip_type: IP type of UE
-
-        Returns:
-            None
-        """
-        if not isinstance(ip_type, IPAddressType):
-            raise ValueError(
-                ' The parameter should be of type "IPAddressType"')
-        cmd = "PDNIPTYPE {},{}".format(self._pdn_number, ip_type.value)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def ue_address_ipv4(self):
-        """ Gets UE IPv4 address
-
-        Args:
-            None
-
-        Returns:
-            UE IPv4 address
-        """
-        cmd = "PDNIPV4? " + self._pdn_number
-        return self._anritsu.send_query(cmd)
-
-    @ue_address_ipv4.setter
-    def ue_address_ipv4(self, ip_address):
-        """ Set UE IPv4 address
-
-        Args:
-            ip_address: UE IPv4 address
-
-        Returns:
-            None
-        """
-        cmd = "PDNIPV4 {},{}".format(self._pdn_number, ip_address)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def ue_address_ipv6(self):
-        """ Gets UE IPv6 address
-
-        Args:
-            None
-
-        Returns:
-            UE IPv6 address
-        """
-        cmd = "PDNIPV6? " + self._pdn_number
-        return self._anritsu.send_query(cmd)
-
-    @ue_address_ipv6.setter
-    def ue_address_ipv6(self, ip_address):
-        """ Set UE IPv6 address
-
-        Args:
-            ip_address: UE IPv6 address
-
-        Returns:
-            None
-        """
-        cmd = "PDNIPV6 {},{}".format(self._pdn_number, ip_address)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def primary_dns_address_ipv4(self):
-        """ Gets Primary DNS server IPv4 address
-
-        Args:
-            None
-
-        Returns:
-            Primary DNS server IPv4 address
-        """
-        cmd = "PDNDNSIPV4PRI? " + self._pdn_number
-        return self._anritsu.send_query(cmd)
-
-    @primary_dns_address_ipv4.setter
-    def primary_dns_address_ipv4(self, ip_address):
-        """ Set Primary DNS server IPv4 address
-
-        Args:
-            ip_address: Primary DNS server IPv4 address
-
-        Returns:
-            None
-        """
-        cmd = "PDNDNSIPV4PRI {},{}".format(self._pdn_number, ip_address)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def secondary_dns_address_ipv4(self):
-        """ Gets secondary DNS server IPv4 address
-
-        Args:
-            None
-
-        Returns:
-            secondary DNS server IPv4 address
-        """
-        cmd = "PDNDNSIPV4SEC? " + self._pdn_number
-        return self._anritsu.send_query(cmd)
-
-    @secondary_dns_address_ipv4.setter
-    def secondary_dns_address_ipv4(self, ip_address):
-        """ Set secondary DNS server IPv4 address
-
-        Args:
-            ip_address: secondary DNS server IPv4 address
-
-        Returns:
-            None
-        """
-        cmd = "PDNDNSIPV4SEC {},{}".format(self._pdn_number, ip_address)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def dns_address_ipv6(self):
-        """ Gets DNS server IPv6 address
-
-        Args:
-            None
-
-        Returns:
-            DNS server IPv6 address
-        """
-        cmd = "PDNDNSIPV6? " + self._pdn_number
-        return self._anritsu.send_query(cmd)
-
-    @dns_address_ipv6.setter
-    def dns_address_ipv6(self, ip_address):
-        """ Set DNS server IPv6 address
-
-        Args:
-            ip_address: DNS server IPv6 address
-
-        Returns:
-            None
-        """
-        cmd = "PDNDNSIPV6 {},{}".format(self._pdn_number, ip_address)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def cscf_address_ipv4(self):
-        """ Gets Secondary P-CSCF IPv4 address
-
-        Args:
-            None
-
-        Returns:
-            Secondary P-CSCF IPv4 address
-        """
-        cmd = "PDNPCSCFIPV4? " + self._pdn_number
-        return self._anritsu.send_query(cmd)
-
-    @cscf_address_ipv4.setter
-    def cscf_address_ipv4(self, ip_address):
-        """ Set Secondary P-CSCF IPv4 address
-
-        Args:
-            ip_address: Secondary P-CSCF IPv4 address
-
-        Returns:
-            None
-        """
-        cmd = "PDNPCSCFIPV4 {},{}".format(self._pdn_number, ip_address)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def cscf_address_ipv6(self):
-        """ Gets P-CSCF IPv6 address
-
-        Args:
-            None
-
-        Returns:
-            P-CSCF IPv6 address
-        """
-        cmd = "PDNPCSCFIPV6? " + self._pdn_number
-        return self._anritsu.send_query(cmd)
-
-    @cscf_address_ipv6.setter
-    def cscf_address_ipv6(self, ip_address):
-        """ Set P-CSCF IPv6 address
-
-        Args:
-            ip_address: P-CSCF IPv6 address
-
-        Returns:
-            None
-        """
-        cmd = "PDNPCSCFIPV6 {},{}".format(self._pdn_number, ip_address)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def pdn_ims(self):
-        """ Get PDN IMS VNID binding status
-
-        Args:
-            None
-
-        Returns:
-            PDN IMS VNID binding status
-        """
-        cmd = "PDNIMS? " + self._pdn_number
-        return self._anritsu.send_query(cmd)
-
-    @pdn_ims.setter
-    def pdn_ims(self, switch):
-        """ Set PDN IMS VNID binding Enable/Disable
-
-        Args:
-            switch: "ENABLE/DISABLE"
-
-        Returns:
-            None
-        """
-        if not isinstance(switch, Switch):
-            raise ValueError(' The parameter should be of type'
-                             ' "Switch", ie, ENABLE or DISABLE ')
-        cmd = "PDNIMS {},{}".format(self._pdn_number, switch.value)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def pdn_vnid(self):
-        """ Get PDN IMS VNID
-
-        Args:
-            None
-
-        Returns:
-            PDN IMS VNID
-        """
-        cmd = "PDNVNID? " + self._pdn_number
-        return self._anritsu.send_query(cmd)
-
-    @pdn_vnid.setter
-    def pdn_vnid(self, vnid):
-        """ Set PDN IMS VNID
-
-        Args:
-            vnid: 1~99
-
-        Returns:
-            None
-        """
-        cmd = "PDNVNID {},{}".format(self._pdn_number, vnid)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def pdn_apn_name(self):
-        """ Get PDN APN NAME
-
-        Args:
-            None
-
-        Returns:
-            PDN APN NAME
-        """
-        cmd = "PDNCHECKAPN? " + self._pdn_number
-        return self._anritsu.send_query(cmd)
-
-    @pdn_apn_name.setter
-    def pdn_apn_name(self, name):
-        """ Set PDN APN NAME
-
-        Args:
-            name: fast.t-mobile.com, ims
-
-        Returns:
-            None
-        """
-        cmd = "PDNCHECKAPN {},{}".format(self._pdn_number, name)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def pdn_qci(self):
-        """ Get PDN QCI Value
-
-        Args:
-            None
-
-        Returns:
-            PDN QCI Value
-        """
-        cmd = "PDNQCIDEFAULT? " + self._pdn_number
-        return self._anritsu.send_query(cmd)
-
-    @pdn_qci.setter
-    def pdn_qci(self, qci_value):
-        """ Set PDN QCI Value
-
-        Args:
-            qci_value: 5, 9
-
-        Returns:
-            None
-        """
-        cmd = "PDNQCIDEFAULT {},{}".format(self._pdn_number, qci_value)
-        self._anritsu.send_command(cmd)
-
-
-class _TriggerMessage(object):
-    '''Class to interact with trigger message handling supported by MD8475 '''
-    def __init__(self, anritsu):
-        self._anritsu = anritsu
-        self.log = anritsu.log
-
-    def set_reply_type(self, message_id, reply_type):
-        """ Sets the reply type of the trigger information
-
-        Args:
-            message_id: trigger information message Id
-            reply_type: reply type of the trigger information
-
-        Returns:
-            None
-        """
-        if not isinstance(message_id, TriggerMessageIDs):
-            raise ValueError(' The parameter should be of type'
-                             ' "TriggerMessageIDs"')
-        if not isinstance(reply_type, TriggerMessageReply):
-            raise ValueError(' The parameter should be of type'
-                             ' "TriggerMessageReply"')
-
-        cmd = "REJECTTYPE {},{}".format(message_id.value, reply_type.value)
-        self._anritsu.send_command(cmd)
-
-    def set_reject_cause(self, message_id, cause):
-        """ Sets the reject cause of the trigger information
-
-        Args:
-            message_id: trigger information message Id
-            cause: cause for reject
-
-        Returns:
-            None
-        """
-        if not isinstance(message_id, TriggerMessageIDs):
-            raise ValueError(' The parameter should be of type'
-                             ' "TriggerMessageIDs"')
-
-        cmd = "REJECTCAUSE {},{}".format(message_id.value, cause)
-        self._anritsu.send_command(cmd)
-
-
-class _IMS_Services(object):
-    '''Class to configure and operate IMS Services'''
-    def __init__(self, anritsu, vnid):
-        self._vnid = vnid
-        self._anritsu = anritsu
-        self.log = anritsu.log
-
-    @property
-    def sync(self):
-        """ Gets Sync Enable status
-
-        Args:
-            None
-
-        Returns:
-            VNID Sync Enable status
-        """
-        cmd = "IMSSYNCENABLE? " + self._vnid
-        return self._anritsu.send_query(cmd)
-
-    @sync.setter
-    def sync(self, switch):
-        """ Set Sync Enable or Disable
-
-        Args:
-            sync: ENABLE/DISABLE
-
-        Returns:
-            None
-        """
-        if not isinstance(switch, Switch):
-            raise ValueError(' The parameter should be of type "Switch"')
-        cmd = "IMSSYNCENABLE {},{}".format(self._vnid, switch.value)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def cscf_address_ipv4(self):
-        """ Gets CSCF IPv4 address
-
-        Args:
-            None
-
-        Returns:
-            CSCF IPv4 address
-        """
-        cmd = "IMSCSCFIPV4? " + self._vnid
-        return self._anritsu.send_query(cmd)
-
-    @cscf_address_ipv4.setter
-    def cscf_address_ipv4(self, ip_address):
-        """ Set CSCF IPv4 address
-
-        Args:
-            ip_address: CSCF IPv4 address
-
-        Returns:
-            None
-        """
-        cmd = "IMSCSCFIPV4 {},{}".format(self._vnid, ip_address)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def cscf_address_ipv6(self):
-        """ Gets CSCF IPv6 address
-
-        Args:
-            None
-
-        Returns:
-            CSCF IPv6 address
-        """
-        cmd = "IMSCSCFIPV6? " + self._vnid
-        return self._anritsu.send_query(cmd)
-
-    @cscf_address_ipv6.setter
-    def cscf_address_ipv6(self, ip_address):
-        """ Set CSCF IPv6 address
-
-        Args:
-            ip_address: CSCF IPv6 address
-
-        Returns:
-            None
-        """
-        cmd = "IMSCSCFIPV6 {},{}".format(self._vnid, ip_address)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def imscscf_iptype(self):
-        """ Gets CSCF IP Type
-
-        Args:
-            None
-
-        Returns:
-            CSCF IP Type
-        """
-        cmd = "IMSCSCFIPTYPE? " + self._vnid
-        return self._anritsu.send_query(cmd)
-
-    @imscscf_iptype.setter
-    def imscscf_iptype(self, iptype):
-        """ Set CSCF IP Type
-
-        Args:
-            iptype: IPV4, IPV6, IPV4V6
-
-        Returns:
-            None
-        """
-        cmd = "IMSCSCFIPTYPE {},{}".format(self._vnid, iptype)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def cscf_monitoring_ua(self):
-        """ Get CSCF Monitoring UA URI
-
-        Args:
-            None
-
-        Returns:
-            CSCF Monitoring UA URI
-        """
-        cmd = "IMSCSCFUAURI? " + self._vnid
-        return self._anritsu.send_query(cmd)
-
-    @cscf_monitoring_ua.setter
-    def cscf_monitoring_ua(self, ua_uri):
-        """ Set CSCF Monitoring UA URI
-
-        Args:
-            ua_uri: CSCF Monitoring UA URI
-
-        Returns:
-            None
-        """
-        cmd = "IMSCSCFUAURI {},{}".format(self._vnid, ua_uri)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def cscf_host_name(self):
-        """ Get CSCF Host Name
-
-        Args:
-            None
-
-        Returns:
-            CSCF Host Name
-        """
-        cmd = "IMSCSCFNAME? " + self._vnid
-        return self._anritsu.send_query(cmd)
-
-    @cscf_host_name.setter
-    def cscf_host_name(self, host_name):
-        """ Set CSCF Host Name
-
-        Args:
-            host_name: CSCF Host Name
-
-        Returns:
-            None
-        """
-        cmd = "IMSCSCFNAME {},{}".format(self._vnid, host_name)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def cscf_ims_authentication(self):
-        """ Get CSCF IMS Auth Value
-
-        Args:
-            None
-
-        Returns:
-            CSCF IMS Auth
-        """
-        cmd = "IMSCSCFAUTH? " + self._vnid
-        return self._anritsu.send_query(cmd)
-
-    @cscf_ims_authentication.setter
-    def cscf_ims_authentication(self, on_off):
-        """ Set CSCF IMS Auth Value
-
-        Args:
-            on_off: CSCF IMS Auth ENABLE/DISABLE
-
-        Returns:
-            None
-        """
-        cmd = "IMSCSCFAUTH {},{}".format(self._vnid, on_off)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def cscf_precondition(self):
-        """ Get CSCF IMS Precondition
-
-        Args:
-            None
-
-        Returns:
-            CSCF IMS Precondition
-        """
-        cmd = "IMSCSCFPRECONDITION? " + self._vnid
-        return self._anritsu.send_query(cmd)
-
-    @cscf_precondition.setter
-    def cscf_precondition(self, on_off):
-        """ Set CSCF IMS Precondition
-
-        Args:
-            on_off: CSCF IMS Precondition ENABLE/DISABLE
-
-        Returns:
-            None
-        """
-        cmd = "IMSCSCFPRECONDITION {},{}".format(self._vnid, on_off)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def cscf_virtual_ua(self):
-        """ Get CSCF Virtual UA URI
-
-        Args:
-            None
-
-        Returns:
-            CSCF Virtual UA URI
-        """
-        cmd = "IMSCSCFVUAURI? " + self._vnid
-        return self._anritsu.send_query(cmd)
-
-    @cscf_virtual_ua.setter
-    def cscf_virtual_ua(self, ua_uri):
-        """ Set CSCF Virtual UA URI
-
-        Args:
-            ua_uri: CSCF Virtual UA URI
-
-        Returns:
-            None
-        """
-        cmd = "IMSCSCFVUAURI {},{}".format(self._vnid, ua_uri)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def tmo_cscf_userslist_add(self):
-        """ Get CSCF USERLIST
-
-        Args:
-            None
-
-        Returns:
-            CSCF USERLIST
-        """
-        cmd = "IMSCSCFUSERSLIST? " + self._vnid
-        return self._anritsu.send_query(cmd)
-
-    @tmo_cscf_userslist_add.setter
-    def tmo_cscf_userslist_add(self, username):
-        """ Set CSCF USER to USERLIST
-            This is needed if IMS AUTH is enabled
-
-        Args:
-            username: CSCF Username
-
-        Returns:
-            None
-        """
-        cmd = "IMSCSCFUSERSLISTADD {},{},00112233445566778899AABBCCDDEEFF,TS34108,AKAV1_MD5,\
-        OPC,00000000000000000000000000000000,8000,TRUE,FALSE,0123456789ABCDEF0123456789ABCDEF,\
-        54CDFEAB9889000001326754CDFEAB98,6754CDFEAB9889BAEFDC457623100132,\
-        326754CDFEAB9889BAEFDC4576231001,TRUE,TRUE,TRUE".format(
-            self._vnid, username)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def fi_cscf_userslist_add(self):
-        """ Get CSCF USERLIST
-
-        Args:
-            None
-
-        Returns:
-            CSCF USERLIST
-        """
-        cmd = "IMSCSCFUSERSLIST? " + self._vnid
-        return self._anritsu.send_query(cmd)
-
-    @fi_cscf_userslist_add.setter
-    def fi_cscf_userslist_add(self, username):
-        """ Set CSCF USER to USERLIST
-            This is needed if IMS AUTH is enabled
-
-        Args:
-            username: CSCF Username
-
-        Returns:
-            None
-        """
-        cmd = "IMSCSCFUSERSLISTADD {},{},00112233445566778899AABBCCDDEEFF,TS34108,AKAV1_MD5,\
-        OPC,00000000000000000000000000000000,8000,TRUE,FALSE,0123456789ABCDEF0123456789ABCDEF,\
-        54CDFEAB9889000001326754CDFEAB98,6754CDFEAB9889BAEFDC457623100132,\
-        326754CDFEAB9889BAEFDC4576231001,TRUE,TRUE,TRUE".format(
-            self._vnid, username)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def vzw_cscf_userslist_add(self):
-        """ Get CSCF USERLIST
-
-        Args:
-            None
-
-        Returns:
-            CSCF USERLIST
-        """
-        cmd = "IMSCSCFUSERSLIST? " + self._vnid
-        return self._anritsu.send_query(cmd)
-
-    @vzw_cscf_userslist_add.setter
-    def vzw_cscf_userslist_add(self, username):
-        """ Set CSCF USER to USERLIST
-            This is needed if IMS AUTH is enabled
-
-        Args:
-            username: CSCF Username
-
-        Returns:
-            None
-        """
-        cmd = "IMSCSCFUSERSLISTADD {},{},465B5CE8B199B49FAA5F0A2EE238A6BC,MILENAGE,AKAV1_MD5,\
-        OP,5F1D289C5D354D0A140C2548F5F3E3BA,8000,TRUE,FALSE,0123456789ABCDEF0123456789ABCDEF,\
-        54CDFEAB9889000001326754CDFEAB98,6754CDFEAB9889BAEFDC457623100132,\
-        326754CDFEAB9889BAEFDC4576231001,TRUE,TRUE,TRUE".format(
-            self._vnid, username)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def dns(self):
-        """ Gets DNS Enable status
-
-        Args:
-            None
-
-        Returns:
-            VNID DNS Enable status
-        """
-        cmd = "IMSDNS? " + self._vnid
-        return self._anritsu.send_query(cmd)
-
-    @dns.setter
-    def dns(self, switch):
-        """ Set DNS Enable or Disable
-
-        Args:
-            sync: ENABLE/DISABLE
-
-        Returns:
-            None
-        """
-        if not isinstance(switch, Switch):
-            raise ValueError(' The parameter should be of type "Switch"')
-        cmd = "IMSDNS {},{}".format(self._vnid, switch.value)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def ndp_nic(self):
-        """ Gets NDP Network Interface name
-
-        Args:
-            None
-
-        Returns:
-            NDP NIC name
-        """
-        cmd = "IMSNDPNIC? " + self._vnid
-        return self._anritsu.send_query(cmd)
-
-    @ndp_nic.setter
-    def ndp_nic(self, nic_name):
-        """ Set NDP Network Interface name
-
-        Args:
-            nic_name: NDP Network Interface name
-
-        Returns:
-            None
-        """
-        cmd = "IMSNDPNIC {},{}".format(self._vnid, nic_name)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def ndp_prefix(self):
-        """ Gets NDP IPv6 Prefix
-
-        Args:
-            None
-
-        Returns:
-            NDP IPv6 Prefix
-        """
-        cmd = "IMSNDPPREFIX? " + self._vnid
-        return self._anritsu.send_query(cmd)
-
-    @ndp_prefix.setter
-    def ndp_prefix(self, prefix_addr):
-        """ Set NDP IPv6 Prefix
-
-        Args:
-            prefix_addr: NDP IPV6 Prefix Addr
-
-        Returns:
-            None
-        """
-        cmd = "IMSNDPPREFIX {},{},64".format(self._vnid, prefix_addr)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def psap(self):
-        """ Gets PSAP Enable status
-
-        Args:
-            None
-
-        Returns:
-            VNID PSAP Enable status
-        """
-        cmd = "IMSPSAP? " + self._vnid
-        return self._anritsu.send_query(cmd)
-
-    @psap.setter
-    def psap(self, switch):
-        """ Set PSAP Enable or Disable
-
-        Args:
-            switch: ENABLE/DISABLE
-
-        Returns:
-            None
-        """
-        if not isinstance(switch, Switch):
-            raise ValueError(' The parameter should be of type "Switch"')
-        cmd = "IMSPSAP {},{}".format(self._vnid, switch.value)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def psap_auto_answer(self):
-        """ Gets PSAP Auto Answer status
-
-        Args:
-            None
-
-        Returns:
-            VNID PSAP Auto Answer status
-        """
-        cmd = "IMSPSAPAUTOANSWER? " + self._vnid
-        return self._anritsu.send_query(cmd)
-
-    @psap_auto_answer.setter
-    def psap_auto_answer(self, switch):
-        """ Set PSAP Auto Answer Enable or Disable
-
-        Args:
-            switch: ENABLE/DISABLE
-
-        Returns:
-            None
-        """
-        if not isinstance(switch, Switch):
-            raise ValueError(' The parameter should be of type "Switch"')
-        cmd = "IMSPSAPAUTOANSWER {},{}".format(self._vnid, switch.value)
-        self._anritsu.send_command(cmd)
-
-    def start_virtual_network(self):
-        """ Start the specified Virtual Network (IMS service)
-
-        Args:
-            None
-
-        Returns:
-            None
-        """
-        cmd = "IMSSTARTVN " + self._vnid
-        return self._anritsu.send_command(cmd)
diff --git a/src/antlion/controllers/anritsu_lib/mg3710a.py b/src/antlion/controllers/anritsu_lib/mg3710a.py
deleted file mode 100644
index 9d6c559..0000000
--- a/src/antlion/controllers/anritsu_lib/mg3710a.py
+++ /dev/null
@@ -1,715 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Controller interface for Anritsu Signal Generator MG3710A.
-"""
-
-import logging
-import socket
-
-from antlion.controllers.anritsu_lib._anritsu_utils import AnritsuError
-from antlion.controllers.anritsu_lib._anritsu_utils import NO_ERROR
-from antlion.controllers.anritsu_lib._anritsu_utils import OPERATION_COMPLETE
-
-from antlion import tracelogger
-
-TERMINATOR = "\n"
-
-
-def create(configs):
-    objs = []
-    for c in configs:
-        ip_address = c["ip_address"]
-        objs.append(MG3710A(ip_address))
-    return objs
-
-
-def destroy(objs):
-    return
-
-
-class MG3710A(object):
-    """Class to communicate with Anritsu Signal Generator MG3710A.
-       This uses GPIB command to interface with Anritsu MG3710A """
-
-    def __init__(self, ip_address):
-        self._ipaddr = ip_address
-        self.log = tracelogger.TraceLogger(logging.getLogger())
-
-        # Open socket connection to Signaling Tester
-        self.log.info("Opening Socket Connection with "
-                      "Signal Generator MG3710A ({}) ".format(self._ipaddr))
-        try:
-            self._sock = socket.create_connection((self._ipaddr, 49158),
-                                                  timeout=30)
-            self.send_query("*IDN?", 60)
-            self.log.info("Communication Signal Generator MG3710A OK.")
-            self.log.info("Opened Socket connection to ({})"
-                          "with handle ({})".format(self._ipaddr, self._sock))
-        except socket.timeout:
-            raise AnritsuError("Timeout happened while conencting to"
-                               " Anritsu MG3710A")
-        except socket.error:
-            raise AnritsuError("Socket creation error")
-
-    def disconnect(self):
-        """ Disconnect Signal Generator MG3710A
-
-        Args:
-          None
-
-        Returns:
-            None
-        """
-        self.send_command(":SYST:COMM:GTL", opc=False)
-        self._sock.close()
-
-    def send_query(self, query, sock_timeout=10):
-        """ Sends a Query message to Anritsu MG3710A and return response
-
-        Args:
-            query - Query string
-
-        Returns:
-            query response
-        """
-        self.log.info("--> {}".format(query))
-        querytoSend = (query + TERMINATOR).encode('utf-8')
-        self._sock.settimeout(sock_timeout)
-        try:
-            self._sock.send(querytoSend)
-            result = self._sock.recv(256).rstrip(TERMINATOR.encode('utf-8'))
-            response = result.decode('utf-8')
-            self.log.info('<-- {}'.format(response))
-            return response
-        except socket.timeout:
-            raise AnritsuError("Timeout: Response from Anritsu")
-        except socket.error:
-            raise AnritsuError("Socket Error")
-
-    def send_command(self, command, sock_timeout=30, opc=True):
-        """ Sends a Command message to Anritsu MG3710A
-
-        Args:
-            command - command string
-
-        Returns:
-            None
-        """
-        self.log.info("--> {}".format(command))
-        cmdToSend = (command + TERMINATOR).encode('utf-8')
-        self._sock.settimeout(sock_timeout)
-        try:
-            self._sock.send(cmdToSend)
-            if opc:
-                # check operation status
-                status = self.send_query("*OPC?")
-                if int(status) != OPERATION_COMPLETE:
-                    raise AnritsuError("Operation not completed")
-        except socket.timeout:
-            raise AnritsuError("Timeout for Command Response from Anritsu")
-        except socket.error:
-            raise AnritsuError("Socket Error for Anritsu command")
-        return
-
-    @property
-    def sg(self):
-        """ Gets current selected signal generator(SG)
-
-        Args:
-            None
-
-        Returns:
-            selected signal generatr number
-        """
-        return self.send_query("PORT?")
-
-    @sg.setter
-    def sg(self, sg_number):
-        """ Selects the signal generator to be controlled
-
-        Args:
-            sg_number: sg number 1 | 2
-
-        Returns:
-            None
-        """
-        cmd = "PORT {}".format(sg_number)
-        self.send_command(cmd)
-
-    def get_modulation_state(self, sg=1):
-        """ Gets the RF signal modulation state (ON/OFF) of signal generator
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-
-        Returns:
-            modulation state . 0 (OFF) | 1(ON)
-        """
-        return self.send_query("OUTP{}:MOD?".format(sg))
-
-    def set_modulation_state(self, state, sg=1):
-        """ Sets the RF signal modulation state
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-            state : ON/OFF
-
-        Returns:
-            None
-        """
-        cmd = "OUTP{}:MOD {}".format(sg, state)
-        self.send_command(cmd)
-
-    def get_rf_output_state(self, sg=1):
-        """ Gets RF signal output state (ON/OFF) of signal generator
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-
-        Returns:
-            RF signal output state . 0 (OFF) | 1(ON)
-        """
-        return self.send_query("OUTP{}?".format(sg))
-
-    def set_rf_output_state(self, state, sg=1):
-        """ Sets the RF signal output state
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-            state : ON/OFF
-
-        Returns:
-            None
-        """
-        cmd = "OUTP{} {}".format(sg, state)
-        self.send_command(cmd)
-
-    def get_frequency(self, sg=1):
-        """ Gets the selected frequency of signal generator
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-
-        Returns:
-            selected frequency
-        """
-        return self.send_query("SOUR{}:FREQ?".format(sg))
-
-    def set_frequency(self, freq, sg=1):
-        """ Sets the frequency of signal generator
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-            freq : frequency
-
-        Returns:
-            None
-        """
-        cmd = "SOUR{}:FREQ {}".format(sg, freq)
-        self.send_command(cmd)
-
-    def get_frequency_offset_state(self, sg=1):
-        """ Gets the Frequency Offset enable state (ON/OFF) of signal generator
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-
-        Returns:
-            Frequency Offset enable state . 0 (OFF) | 1(ON)
-        """
-        return self.send_query("SOUR{}:FREQ:OFFS:STAT?".format(sg))
-
-    def set_frequency_offset_state(self, state, sg=1):
-        """ Sets the Frequency Offset enable state
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-            state : enable state, ON/OFF
-
-        Returns:
-            None
-        """
-        cmd = "SOUR{}:FREQ:OFFS:STAT {}".format(sg, state)
-        self.send_command(cmd)
-
-    def get_frequency_offset(self, sg=1):
-        """ Gets the current frequency offset value
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-
-        Returns:
-            current frequency offset value
-        """
-        return self.send_query("SOUR{}:FREQ:OFFS?".format(sg))
-
-    def set_frequency_offset(self, offset, sg=1):
-        """ Sets the frequency offset value
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-            offset : frequency offset value
-
-        Returns:
-            None
-        """
-        cmd = "SOUR{}:FREQ:OFFS {}".format(sg, offset)
-        self.send_command(cmd)
-
-    def get_frequency_offset_multiplier_state(self, sg=1):
-        """ Gets the Frequency Offset multiplier enable state (ON/OFF) of
-            signal generator
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-
-        Returns:
-            Frequency Offset  multiplier enable state . 0 (OFF) | 1(ON)
-        """
-        return self.send_query("SOUR{}:FREQ:MULT:STAT?".format(sg))
-
-    def set_frequency_offset_multiplier_state(self, state, sg=1):
-        """ Sets the  Frequency Offset multiplier enable state
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-            state : enable state, ON/OFF
-
-        Returns:
-            None
-        """
-        cmd = "SOUR{}:FREQ:MULT:STAT {}".format(sg, state)
-        self.send_command(cmd)
-
-    def get_frequency_offset_multiplier(self, sg=1):
-        """ Gets the current frequency offset multiplier value
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-
-        Returns:
-            frequency offset multiplier value
-        """
-        return self.send_query("SOUR{}:FREQ:MULT?".format(sg))
-
-    def set_frequency_offset_multiplier(self, multiplier, sg=1):
-        """ Sets the frequency offset multiplier value
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-            multiplier : frequency offset multiplier value
-
-        Returns:
-            None
-        """
-        cmd = "SOUR{}:FREQ:MULT {}".format(sg, multiplier)
-        self.send_command(cmd)
-
-    def get_channel(self, sg=1):
-        """ Gets the current channel number
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-
-        Returns:
-            current channel number
-        """
-        return self.send_query("SOUR{}:FREQ:CHAN:NUMB?".format(sg))
-
-    def set_channel(self, channel, sg=1):
-        """ Sets the channel number
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-            channel : channel number
-
-        Returns:
-            None
-        """
-        cmd = "SOUR{}:FREQ:CHAN:NUMB {}".format(sg, channel)
-        self.send_command(cmd)
-
-    def get_channel_group(self, sg=1):
-        """ Gets the current channel group number
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-
-        Returns:
-            current channel group number
-        """
-        return self.send_query("SOUR{}:FREQ:CHAN:GRO?".format(sg))
-
-    def set_channel_group(self, group, sg=1):
-        """ Sets the channel group number
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-            group : channel group number
-
-        Returns:
-            None
-        """
-        cmd = "SOUR{}:FREQ:CHAN:GRO {}".format(sg, group)
-        self.send_command(cmd)
-
-    def get_rf_output_level(self, sg=1):
-        """ Gets the current RF output level
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-
-        Returns:
-            current RF output level
-        """
-        return self.send_query("SOUR{}:POW:CURR?".format(sg))
-
-    def get_output_level_unit(self, sg=1):
-        """ Gets the current RF output level unit
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-
-        Returns:
-            current RF output level unit
-        """
-        return self.send_query("UNIT{}:POW?".format(sg))
-
-    def set_output_level_unit(self, unit, sg=1):
-        """ Sets the RF output level unit
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-            unit : Output level unit
-
-        Returns:
-            None
-        """
-        cmd = "UNIT{}:POW {}".format(sg, unit)
-        self.send_command(cmd)
-
-    def get_output_level(self, sg=1):
-        """ Gets the Output level
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-
-        Returns:
-            Output level
-        """
-        return self.send_query("SOUR{}:POW?".format(sg))
-
-    def set_output_level(self, level, sg=1):
-        """ Sets the Output level
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-            level : Output level
-
-        Returns:
-            None
-        """
-        cmd = "SOUR{}:POW {}".format(sg, level)
-        self.send_command(cmd)
-
-    def get_arb_state(self, sg=1):
-        """ Gets the ARB function state
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-
-        Returns:
-            ARB function state . 0 (OFF) | 1(ON)
-        """
-        return self.send_query("SOUR{}:RAD:ARB?".format(sg))
-
-    def set_arb_state(self, state, sg=1):
-        """ Sets the ARB function state
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-            state : enable state (ON/OFF)
-
-        Returns:
-            None
-        """
-        cmd = "SOUR{}:RAD:ARB {}".format(sg, state)
-        self.send_command(cmd)
-
-    def restart_arb_waveform_pattern(self, sg=1):
-        """ playback the waveform pattern from the beginning.
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-
-        Returns:
-            None
-        """
-        cmd = "SOUR{}:RAD:ARB:WAV:REST".format(sg)
-        self.send_command(cmd)
-
-    def load_waveform(self, package_name, pattern_name, memory, sg=1):
-        """ loads the waveform from HDD to specified memory
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-            package_name : Package name of signal
-            pattern_name : Pattern name of signal
-            memory: memory for the signal - "A" or "B"
-
-        Returns:
-            None
-        """
-        cmd = "MMEM{}:LOAD:WAV:WM{} '{}','{}'".format(sg, memory, package_name,
-                                                      pattern_name)
-        self.send_command(cmd)
-
-    def select_waveform(self, package_name, pattern_name, memory, sg=1):
-        """ Selects the waveform to output on specified memory
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-            package_name : Package name of signal
-            pattern_name : Pattern name of signal
-            memory: memory for the signal - "A" or "B"
-
-        Returns:
-            None
-        """
-        cmd = "SOUR{}:RAD:ARB:WM{}:WAV '{}','{}'".format(
-            sg, memory, package_name, pattern_name)
-        self.send_command(cmd)
-
-    def get_freq_relative_display_status(self, sg=1):
-        """ Gets the frequency relative display status
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-
-        Returns:
-            frequency relative display status.   0 (OFF) | 1(ON)
-        """
-        return self.send_query("SOUR{}:FREQ:REF:STAT?".format(sg))
-
-    def set_freq_relative_display_status(self, enable, sg=1):
-        """ Sets frequency relative display status
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-            enable : enable type (ON/OFF)
-
-        Returns:
-            None
-        """
-        cmd = "SOUR{}:FREQ:REF:STAT {}".format(sg, enable)
-        self.send_command(cmd)
-
-    def get_freq_channel_display_type(self, sg=1):
-        """ Gets the selected type(frequency/channel) for input display
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-
-        Returns:
-            selected type(frequecy/channel) for input display
-        """
-        return self.send_query("SOUR{}:FREQ:TYPE?".format(sg))
-
-    def set_freq_channel_display_type(self, freq_channel, sg=1):
-        """ Sets thes type(frequency/channel) for input display
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-            freq_channel : display type (frequency/channel)
-
-        Returns:
-            None
-        """
-        cmd = "SOUR{}:FREQ:TYPE {}".format(sg, freq_channel)
-        self.send_command(cmd)
-
-    def get_arb_combination_mode(self, sg=1):
-        """ Gets the current mode to generate the pattern
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-
-        Returns:
-            current mode to generate the pattern
-        """
-        return self.send_query("SOUR{}:RAD:ARB:PCOM?".format(sg))
-
-    def set_arb_combination_mode(self, mode, sg=1):
-        """ Sets the mode to generate the pattern
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-            mode : pattern generation mode
-
-        Returns:
-            None
-        """
-        cmd = "SOUR{}:RAD:ARB:PCOM {}".format(sg, mode)
-        self.send_command(cmd)
-
-    def get_arb_pattern_aorb_state(self, a_or_b, sg=1):
-        """ Gets the Pattern A/B output state
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-            a_or_b : Patten A or Pattern B( "A" or "B")
-
-        Returns:
-            Pattern A/B output state . 0(OFF) | 1(ON)
-        """
-        return self.send_query("SOUR{}:RAD:ARB:WM{}:OUTP?".format(a_or_b, sg))
-
-    def set_arb_pattern_aorb_state(self, a_or_b, state, sg=1):
-        """ Sets the Pattern A/B output state
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-            a_or_b : Patten A or Pattern B( "A" or "B")
-            state : output state
-
-        Returns:
-            None
-        """
-        cmd = "SOUR{}:RAD:ARB:WM{}:OUTP {}".format(sg, a_or_b, state)
-        self.send_command(cmd)
-
-    def get_arb_level_aorb(self, a_or_b, sg=1):
-        """ Gets the Pattern A/B output level
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-            a_or_b : Patten A or Pattern B( "A" or "B")
-
-        Returns:
-             Pattern A/B output level
-        """
-        return self.send_query("SOUR{}:RAD:ARB:WM{}:POW?".format(sg, a_or_b))
-
-    def set_arb_level_aorb(self, a_or_b, level, sg=1):
-        """ Sets the Pattern A/B output level
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-            a_or_b : Patten A or Pattern B( "A" or "B")
-            level : output level
-
-        Returns:
-            None
-        """
-        cmd = "SOUR{}:RAD:ARB:WM{}:POW {}".format(sg, a_or_b, level)
-        self.send_command(cmd)
-
-    def get_arb_freq_offset(self, sg=1):
-        """ Gets the frequency offset between Pattern A and Patten B
-            when CenterSignal is A or B.
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-
-        Returns:
-            frequency offset between Pattern A and Patten B
-        """
-        return self.send_query("SOUR{}:RAD:ARB:FREQ:OFFS?".format(sg))
-
-    def set_arb_freq_offset(self, offset, sg=1):
-        """ Sets the frequency offset between Pattern A and Patten B when
-            CenterSignal is A or B.
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-            offset : frequency offset
-
-        Returns:
-            None
-        """
-        cmd = "SOUR{}:RAD:ARB:FREQ:OFFS {}".format(sg, offset)
-        self.send_command(cmd)
-
-    def get_arb_freq_offset_aorb(self, sg=1):
-        """ Gets the frequency offset of Pattern A/Pattern B based on Baseband
-            center frequency
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-
-        Returns:
-            frequency offset
-        """
-        return self.send_query("SOUR{}:RAD:ARB:WM{}:FREQ:OFFS?".format(
-            sg, a_or_b))
-
-    def set_arb_freq_offset_aorb(self, a_or_b, offset, sg=1):
-        """ Sets the frequency offset of Pattern A/Pattern B based on Baseband
-            center frequency
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-            a_or_b : Patten A or Pattern B( "A" or "B")
-            offset : frequency offset
-
-        Returns:
-            None
-        """
-        cmd = "SOUR{}:RAD:ARB:WM{}:FREQ:OFFS {}".format(sg, a_or_b, offset)
-        self.send_command(cmd)
diff --git a/src/antlion/controllers/ap_lib/ap_get_interface.py b/src/antlion/controllers/ap_lib/ap_get_interface.py
index ddd6987..74a6d2c 100644
--- a/src/antlion/controllers/ap_lib/ap_get_interface.py
+++ b/src/antlion/controllers/ap_lib/ap_get_interface.py
@@ -15,11 +15,17 @@
 # limitations under the License.
 
 import logging
+
+from typing import List, Optional, Tuple, TYPE_CHECKING
+
 from antlion.libs.proc import job
 
-GET_ALL_INTERFACE = 'ls /sys/class/net'
-GET_VIRTUAL_INTERFACE = 'ls /sys/devices/virtual/net'
-BRCTL_SHOW = 'brctl show'
+if TYPE_CHECKING:
+    from antlion.controllers.access_point import AccessPoint
+
+GET_ALL_INTERFACE = "ls /sys/class/net"
+GET_VIRTUAL_INTERFACE = "ls /sys/devices/virtual/net"
+BRCTL_SHOW = "brctl show"
 
 
 class ApInterfacesError(Exception):
@@ -27,10 +33,11 @@
 
 
 class ApInterfaces(object):
-    """Class to get network interface information for the device.
+    """Class to get network interface information for the device."""
 
-    """
-    def __init__(self, ap, wan_interface_override=None):
+    def __init__(
+        self, ap: "AccessPoint", wan_interface_override: Optional[str] = None
+    ) -> None:
         """Initialize the ApInterface class.
 
         Args:
@@ -40,29 +47,29 @@
         self.ssh = ap.ssh
         self.wan_interface_override = wan_interface_override
 
-    def get_all_interface(self):
+    def get_all_interface(self) -> List[str]:
         """Get all network interfaces on the device.
 
         Returns:
             interfaces_all: list of all the network interfaces on device
         """
         output = self.ssh.run(GET_ALL_INTERFACE)
-        interfaces_all = output.stdout.split('\n')
+        interfaces_all = output.stdout.split("\n")
 
         return interfaces_all
 
-    def get_virtual_interface(self):
+    def get_virtual_interface(self) -> List[str]:
         """Get all virtual interfaces on the device.
 
         Returns:
             interfaces_virtual: list of all the virtual interfaces on device
         """
         output = self.ssh.run(GET_VIRTUAL_INTERFACE)
-        interfaces_virtual = output.stdout.split('\n')
+        interfaces_virtual = output.stdout.split("\n")
 
         return interfaces_virtual
 
-    def get_physical_interface(self):
+    def get_physical_interface(self) -> List[str]:
         """Get all the physical interfaces of the device.
 
         Get all physical interfaces such as eth ports and wlan ports
@@ -75,7 +82,7 @@
 
         return interfaces_phy
 
-    def get_bridge_interface(self):
+    def get_bridge_interface(self) -> Optional[List[str]]:
         """Get all the bridge interfaces of the device.
 
         Returns:
@@ -85,17 +92,16 @@
         interfaces_bridge = []
         try:
             output = self.ssh.run(BRCTL_SHOW)
-            lines = output.stdout.split('\n')
+            lines = output.stdout.split("\n")
             for line in lines:
-                interfaces_bridge.append(line.split('\t')[0])
+                interfaces_bridge.append(line.split("\t")[0])
             interfaces_bridge.pop(0)
-            interfaces_bridge = [x for x in interfaces_bridge if x != '']
-            return interfaces_bridge
+            return [x for x in interfaces_bridge if x != ""]
         except job.Error:
-            logging.info('No brctl utility is available')
+            logging.info("No brctl utility is available")
             return None
 
-    def get_wlan_interface(self):
+    def get_wlan_interface(self) -> Tuple[str, str]:
         """Get all WLAN interfaces and specify 2.4 GHz and 5 GHz interfaces.
 
         Returns:
@@ -107,21 +113,18 @@
         wlan_5g = None
         interfaces_phy = self.get_physical_interface()
         for iface in interfaces_phy:
-            IW_LIST_FREQ = 'iwlist %s freq' % iface
-            output = self.ssh.run(IW_LIST_FREQ)
-            if 'Channel 06' in output.stdout and 'Channel 36' not in output.stdout:
+            output = self.ssh.run(f"iwlist {iface} freq")
+            if "Channel 06" in output.stdout and "Channel 36" not in output.stdout:
                 wlan_2g = iface
-            elif 'Channel 36' in output.stdout and 'Channel 06' not in output.stdout:
+            elif "Channel 36" in output.stdout and "Channel 06" not in output.stdout:
                 wlan_5g = iface
 
-        interfaces_wlan = [wlan_2g, wlan_5g]
+        if wlan_2g is None or wlan_5g is None:
+            raise ApInterfacesError("Missing at least one WLAN interface")
 
-        if None not in interfaces_wlan:
-            return interfaces_wlan
+        return (wlan_2g, wlan_5g)
 
-        raise ApInterfacesError('Missing at least one WLAN interface')
-
-    def get_wan_interface(self):
+    def get_wan_interface(self) -> str:
         """Get the WAN interface which has internet connectivity. If a wan
         interface is already specified return that instead.
 
@@ -145,13 +148,13 @@
         if wan:
             return wan
 
-        output = self.ssh.run('ifconfig')
-        interfaces_all = output.stdout.split('\n')
-        logging.info("IFCONFIG output = %s" % interfaces_all)
+        output = self.ssh.run("ifconfig")
+        interfaces_all = output.stdout.split("\n")
+        logging.info(f"IFCONFIG output = {interfaces_all}")
 
-        raise ApInterfacesError('No WAN interface available')
+        raise ApInterfacesError("No WAN interface available")
 
-    def get_lan_interface(self):
+    def get_lan_interface(self) -> Optional[str]:
         """Get the LAN interface connecting to local devices.
 
         Returns:
@@ -165,14 +168,13 @@
         interface_wan = self.get_wan_interface()
         interfaces_eth.remove(interface_wan)
         for iface in interfaces_eth:
-            LAN_CHECK = 'ifconfig %s' % iface
-            output = self.ssh.run(LAN_CHECK)
-            if 'RUNNING' in output.stdout:
+            output = self.ssh.run(f"ifconfig {iface}")
+            if "RUNNING" in output.stdout:
                 lan = iface
                 break
         return lan
 
-    def check_ping(self, iface):
+    def check_ping(self, iface: str) -> int:
         """Check the ping status on specific interface to determine the WAN.
 
         Args:
@@ -180,9 +182,8 @@
         Returns:
             network_status: the connectivity status of the interface
         """
-        PING = 'ping -c 3 -I %s 8.8.8.8' % iface
         try:
-            self.ssh.run(PING)
+            self.ssh.run(f"ping -c 3 -I {iface} 8.8.8.8")
             return 1
         except job.Error:
             return 0
diff --git a/src/antlion/controllers/ap_lib/ap_iwconfig.py b/src/antlion/controllers/ap_lib/ap_iwconfig.py
index 550f785..225a397 100644
--- a/src/antlion/controllers/ap_lib/ap_iwconfig.py
+++ b/src/antlion/controllers/ap_lib/ap_iwconfig.py
@@ -14,7 +14,12 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from antlion.libs.proc import job
+from typing import Optional, TYPE_CHECKING
+
+from antlion.libs.proc.job import Result
+
+if TYPE_CHECKING:
+    from antlion.controllers.access_point import AccessPoint
 
 
 class ApIwconfigError(Exception):
@@ -22,13 +27,11 @@
 
 
 class ApIwconfig(object):
-    """Class to configure wireless interface via iwconfig
+    """Class to configure wireless interface via iwconfig"""
 
-    """
+    PROGRAM_FILE = "/usr/local/sbin/iwconfig"
 
-    PROGRAM_FILE = '/usr/local/sbin/iwconfig'
-
-    def __init__(self, ap):
+    def __init__(self, ap: "AccessPoint") -> None:
         """Initialize the ApIwconfig class.
 
         Args:
@@ -36,14 +39,12 @@
         """
         self.ssh = ap.ssh
 
-    def ap_iwconfig(self, interface, arguments=None):
+    def ap_iwconfig(
+        self, interface: str, arguments: Optional[str] = None
+    ) -> Optional[Result]:
         """Configure the wireless interface using iwconfig.
 
         Returns:
             output: the output of the command, if any
         """
-        iwconfig_command = '%s %s %s' % (self.PROGRAM_FILE, interface,
-                                         arguments)
-        output = self.ssh.run(iwconfig_command)
-
-        return output
+        return self.ssh.run(f"{self.PROGRAM_FILE} {interface} {arguments}")
diff --git a/src/antlion/controllers/ap_lib/bridge_interface.py b/src/antlion/controllers/ap_lib/bridge_interface.py
index 2dd82d8..ee4733e 100644
--- a/src/antlion/controllers/ap_lib/bridge_interface.py
+++ b/src/antlion/controllers/ap_lib/bridge_interface.py
@@ -18,17 +18,16 @@
 import time
 from antlion.libs.proc import job
 
-_BRCTL = 'brctl'
-BRIDGE_NAME = 'br-lan'
-CREATE_BRIDGE = '%s addbr %s' % (_BRCTL, BRIDGE_NAME)
-DELETE_BRIDGE = '%s delbr %s' % (_BRCTL, BRIDGE_NAME)
-BRING_DOWN_BRIDGE = 'ifconfig %s down' % BRIDGE_NAME
+_BRCTL = "brctl"
+BRIDGE_NAME = "br-lan"
+CREATE_BRIDGE = "%s addbr %s" % (_BRCTL, BRIDGE_NAME)
+DELETE_BRIDGE = "%s delbr %s" % (_BRCTL, BRIDGE_NAME)
+BRING_DOWN_BRIDGE = "ifconfig %s down" % BRIDGE_NAME
 
 
 class BridgeInterfaceConfigs(object):
-    """Configs needed for creating bridge interface between LAN and WLAN.
+    """Configs needed for creating bridge interface between LAN and WLAN."""
 
-    """
     def __init__(self, iface_wlan, iface_lan, bridge_ip):
         """Set bridge interface configs based on the channel info.
 
@@ -43,9 +42,8 @@
 
 
 class BridgeInterface(object):
-    """Class object for bridge interface betwen WLAN and LAN
+    """Class object for bridge interface between WLAN and LAN"""
 
-    """
     def __init__(self, ap):
         """Initialize the BridgeInterface class.
 
@@ -62,40 +60,44 @@
             brconfigs: the bridge interface config, type BridgeInterfaceConfigs
         """
 
-        logging.info('Create bridge interface between LAN and WLAN')
+        logging.info("Create bridge interface between LAN and WLAN")
         # Create the bridge
         try:
             self.ssh.run(CREATE_BRIDGE)
         except job.Error:
             logging.warning(
-                'Bridge interface {} already exists, no action needed'.format(
-                    BRIDGE_NAME))
+                "Bridge interface {} already exists, no action needed".format(
+                    BRIDGE_NAME
+                )
+            )
 
         # Enable 4addr mode on for the wlan interface
-        ENABLE_4ADDR = 'iw dev %s set 4addr on' % (brconfigs.iface_wlan)
+        ENABLE_4ADDR = "iw dev %s set 4addr on" % (brconfigs.iface_wlan)
         try:
             self.ssh.run(ENABLE_4ADDR)
         except job.Error:
-            logging.warning('4addr is already enabled on {}'.format(
-                brconfigs.iface_wlan))
+            logging.warning(
+                "4addr is already enabled on {}".format(brconfigs.iface_wlan)
+            )
 
         # Add both LAN and WLAN interfaces to the bridge interface
         for interface in [brconfigs.iface_lan, brconfigs.iface_wlan]:
-            ADD_INTERFACE = '%s addif %s %s' % (_BRCTL, BRIDGE_NAME, interface)
+            ADD_INTERFACE = "%s addif %s %s" % (_BRCTL, BRIDGE_NAME, interface)
             try:
                 self.ssh.run(ADD_INTERFACE)
             except job.Error:
-                logging.warning('{} has already been added to {}'.format(
-                    interface, BRIDGE_NAME))
+                logging.warning(
+                    "{} has already been added to {}".format(interface, BRIDGE_NAME)
+                )
         time.sleep(5)
 
         # Set IP address on the bridge interface to bring it up
-        SET_BRIDGE_IP = 'ifconfig %s %s' % (BRIDGE_NAME, brconfigs.bridge_ip)
+        SET_BRIDGE_IP = "ifconfig %s %s" % (BRIDGE_NAME, brconfigs.bridge_ip)
         self.ssh.run(SET_BRIDGE_IP)
         time.sleep(2)
 
         # Bridge interface is up
-        logging.info('Bridge interface is up and running')
+        logging.info("Bridge interface is up and running")
 
     def teardown(self, brconfigs):
         """Tear down the bridge interface.
@@ -103,17 +105,17 @@
         Args:
             brconfigs: the bridge interface config, type BridgeInterfaceConfigs
         """
-        logging.info('Bringing down the bridge interface')
+        logging.info("Bringing down the bridge interface")
         # Delete the bridge interface
         self.ssh.run(BRING_DOWN_BRIDGE)
         time.sleep(1)
         self.ssh.run(DELETE_BRIDGE)
 
         # Bring down wlan interface and disable 4addr mode
-        BRING_DOWN_WLAN = 'ifconfig %s down' % brconfigs.iface_wlan
+        BRING_DOWN_WLAN = "ifconfig %s down" % brconfigs.iface_wlan
         self.ssh.run(BRING_DOWN_WLAN)
         time.sleep(2)
-        DISABLE_4ADDR = 'iw dev %s set 4addr off' % (brconfigs.iface_wlan)
+        DISABLE_4ADDR = "iw dev %s set 4addr off" % (brconfigs.iface_wlan)
         self.ssh.run(DISABLE_4ADDR)
         time.sleep(1)
-        logging.info('Bridge interface is down')
+        logging.info("Bridge interface is down")
diff --git a/src/antlion/controllers/ap_lib/dhcp_config.py b/src/antlion/controllers/ap_lib/dhcp_config.py
index 4cc7adf..a50b6d0 100644
--- a/src/antlion/controllers/ap_lib/dhcp_config.py
+++ b/src/antlion/controllers/ap_lib/dhcp_config.py
@@ -14,7 +14,7 @@
 
 import copy
 
-_ROUTER_DNS = '8.8.8.8, 4.4.4.4'
+_ROUTER_DNS = "8.8.8.8, 4.4.4.4"
 
 
 class Subnet(object):
@@ -30,14 +30,16 @@
         additional_options: A dictionary corresponding to DHCP options.
     """
 
-    def __init__(self,
-                 subnet,
-                 start=None,
-                 end=None,
-                 router=None,
-                 lease_time=None,
-                 additional_parameters={},
-                 additional_options={}):
+    def __init__(
+        self,
+        subnet,
+        start=None,
+        end=None,
+        router=None,
+        lease_time=None,
+        additional_parameters={},
+        additional_options={},
+    ):
         """
         Args:
             subnet: ipaddress.IPv4Network, The address space of the subnetwork
@@ -64,9 +66,9 @@
             self.start = self.network[2]
 
         if not self.start in self.network:
-            raise ValueError('The start range is not in the subnet.')
+            raise ValueError("The start range is not in the subnet.")
         if self.start.is_reserved:
-            raise ValueError('The start of the range cannot be reserved.')
+            raise ValueError("The start of the range cannot be reserved.")
 
         if end:
             self.end = end
@@ -74,18 +76,17 @@
             self.end = self.network[-2]
 
         if not self.end in self.network:
-            raise ValueError('The end range is not in the subnet.')
+            raise ValueError("The end range is not in the subnet.")
         if self.end.is_reserved:
-            raise ValueError('The end of the range cannot be reserved.')
+            raise ValueError("The end of the range cannot be reserved.")
         if self.end < self.start:
-            raise ValueError(
-                'The end must be an address larger than the start.')
+            raise ValueError("The end must be an address larger than the start.")
 
         if router:
             if router >= self.start and router <= self.end:
-                raise ValueError('Router must not be in pool range.')
+                raise ValueError("Router must not be in pool range.")
             if not router in self.network:
-                raise ValueError('Router must be in the given subnet.')
+                raise ValueError("Router must be in the given subnet.")
 
             self.router = router
         else:
@@ -102,13 +103,13 @@
                     break
 
             if not self.router:
-                raise ValueError('No useable host found.')
+                raise ValueError("No useable host found.")
 
         self.lease_time = lease_time
         self.additional_parameters = additional_parameters
         self.additional_options = additional_options
-        if 'domain-name-servers' not in self.additional_options:
-            self.additional_options['domain-name-servers'] = _ROUTER_DNS
+        if "domain-name-servers" not in self.additional_options:
+            self.additional_options["domain-name-servers"] = _ROUTER_DNS
 
 
 class StaticMapping(object):
@@ -137,14 +138,15 @@
         max_lease_time: The max time to allow a lease.
     """
 
-    def __init__(self,
-                 subnets=None,
-                 static_mappings=None,
-                 default_lease_time=600,
-                 max_lease_time=7200):
+    def __init__(
+        self,
+        subnets=None,
+        static_mappings=None,
+        default_lease_time=600,
+        max_lease_time=7200,
+    ):
         self.subnets = copy.deepcopy(subnets) if subnets else []
-        self.static_mappings = (copy.deepcopy(static_mappings)
-                                if static_mappings else [])
+        self.static_mappings = copy.deepcopy(static_mappings) if static_mappings else []
         self.default_lease_time = default_lease_time
         self.max_lease_time = max_lease_time
 
@@ -155,9 +157,9 @@
         lines = []
 
         if self.default_lease_time:
-            lines.append('default-lease-time %d;' % self.default_lease_time)
+            lines.append("default-lease-time %d;" % self.default_lease_time)
         if self.max_lease_time:
-            lines.append('max-lease-time %s;' % self.max_lease_time)
+            lines.append("max-lease-time %s;" % self.max_lease_time)
 
         for subnet in self.subnets:
             address = subnet.network.network_address
@@ -169,35 +171,35 @@
             additional_parameters = subnet.additional_parameters
             additional_options = subnet.additional_options
 
-            lines.append('subnet %s netmask %s {' % (address, mask))
-            lines.append('\tpool {')
-            lines.append('\t\toption subnet-mask %s;' % mask)
-            lines.append('\t\toption routers %s;' % router)
-            lines.append('\t\trange %s %s;' % (start, end))
+            lines.append("subnet %s netmask %s {" % (address, mask))
+            lines.append("\tpool {")
+            lines.append("\t\toption subnet-mask %s;" % mask)
+            lines.append("\t\toption routers %s;" % router)
+            lines.append("\t\trange %s %s;" % (start, end))
             if lease_time:
-                lines.append('\t\tdefault-lease-time %d;' % lease_time)
-                lines.append('\t\tmax-lease-time %d;' % lease_time)
+                lines.append("\t\tdefault-lease-time %d;" % lease_time)
+                lines.append("\t\tmax-lease-time %d;" % lease_time)
             for param, value in additional_parameters.items():
-                lines.append('\t\t%s %s;' % (param, value))
+                lines.append("\t\t%s %s;" % (param, value))
             for option, value in additional_options.items():
-                lines.append('\t\toption %s %s;' % (option, value))
-            lines.append('\t}')
-            lines.append('}')
+                lines.append("\t\toption %s %s;" % (option, value))
+            lines.append("\t}")
+            lines.append("}")
 
         for mapping in self.static_mappings:
             identifier = mapping.identifier
             fixed_address = mapping.ipv4_address
-            host_fake_name = 'host%s' % identifier.replace(':', '')
+            host_fake_name = "host%s" % identifier.replace(":", "")
             lease_time = mapping.lease_time
 
-            lines.append('host %s {' % host_fake_name)
-            lines.append('\thardware ethernet %s;' % identifier)
-            lines.append('\tfixed-address %s;' % fixed_address)
+            lines.append("host %s {" % host_fake_name)
+            lines.append("\thardware ethernet %s;" % identifier)
+            lines.append("\tfixed-address %s;" % fixed_address)
             if lease_time:
-                lines.append('\tdefault-lease-time %d;' % lease_time)
-                lines.append('\tmax-lease-time %d;' % lease_time)
-            lines.append('}')
+                lines.append("\tdefault-lease-time %d;" % lease_time)
+                lines.append("\tmax-lease-time %d;" % lease_time)
+            lines.append("}")
 
-        config_str = '\n'.join(lines)
+        config_str = "\n".join(lines)
 
         return config_str
diff --git a/src/antlion/controllers/ap_lib/dhcp_server.py b/src/antlion/controllers/ap_lib/dhcp_server.py
index 01411c6..c52983b 100644
--- a/src/antlion/controllers/ap_lib/dhcp_server.py
+++ b/src/antlion/controllers/ap_lib/dhcp_server.py
@@ -16,6 +16,7 @@
 
 from tenacity import retry, retry_if_exception_type, stop_after_attempt, wait_fixed
 
+from antlion.controllers.ap_lib.dhcp_config import DhcpConfig
 from antlion.controllers.utils_lib.commands import shell
 from antlion import logger
 
@@ -37,9 +38,9 @@
         config: The dhcp server configuration that is being used.
     """
 
-    PROGRAM_FILE = 'dhcpd'
+    PROGRAM_FILE = "dhcpd"
 
-    def __init__(self, runner, interface, working_dir='/tmp'):
+    def __init__(self, runner, interface, working_dir="/tmp"):
         """
         Args:
             runner: Object that has a run_async and run methods for running
@@ -47,33 +48,34 @@
             interface: string, The name of the interface to use.
             working_dir: The directory to work out of.
         """
-        self._log = logger.create_logger(lambda msg: '[DHCP Server|%s] %s' % (
-            interface, msg))
+        self._log = logger.create_logger(lambda msg: f"[DHCP Server|{interface}] {msg}")
+
         self._runner = runner
         self._working_dir = working_dir
         self._shell = shell.ShellCommand(runner, working_dir)
-        self._stdio_log_file = 'dhcpd_%s.log' % interface
-        self._config_file = 'dhcpd_%s.conf' % interface
-        self._lease_file = 'dhcpd_%s.leases' % interface
-        self._pid_file = 'dhcpd_%s.pid' % interface
-        self._identifier = '%s.*%s' % (self.PROGRAM_FILE, self._config_file)
+        self._stdio_log_file = f"dhcpd_{interface}.log"
+        self._config_file = f"dhcpd_{interface}.conf"
+        self._lease_file = f"dhcpd_{interface}.leases"
+        self._pid_file = f"dhcpd_{interface}.pid"
+        self._identifier = f"{self.PROGRAM_FILE}.*{self._config_file}"
 
     # There is a slight timing issue where if the proc filesystem in Linux
     # doesn't get updated in time as when this is called, the NoInterfaceError
     # will happening.  By adding this retry, the error appears to have gone away
     # but will still show a warning if the problem occurs.  The error seems to
     # happen more with bridge interfaces than standard interfaces.
-    @retry(retry=retry_if_exception_type(NoInterfaceError),
-           stop=stop_after_attempt(3),
-           wait=wait_fixed(1))
-    def start(self, config, timeout=60):
+    @retry(
+        retry=retry_if_exception_type(NoInterfaceError),
+        stop=stop_after_attempt(3),
+        wait=wait_fixed(1),
+    )
+    def start(self, config: DhcpConfig, timeout_sec: int = 60) -> None:
         """Starts the dhcp server.
 
         Starts the dhcp server daemon and runs it in the background.
 
         Args:
-            config: dhcp_config.DhcpConfig, Configs to start the dhcp server
-                    with.
+            config: Configs to start the dhcp server with.
 
         Raises:
             Error: Raised when a dhcp server error is found.
@@ -86,20 +88,24 @@
         self._shell.delete_file(self._pid_file)
         self._shell.touch_file(self._lease_file)
 
-        dhcpd_command = '%s -cf "%s" -lf %s -f -pf "%s"' % (
-            self.PROGRAM_FILE, self._config_file, self._lease_file,
-            self._pid_file)
-        base_command = 'cd "%s"; %s' % (self._working_dir, dhcpd_command)
-        job_str = '%s > "%s" 2>&1' % (base_command, self._stdio_log_file)
+        dhcpd_command = (
+            f"{self.PROGRAM_FILE} "
+            f'-cf "{self._config_file}" '
+            f"-lf {self._lease_file} "
+            f'-pf "{self._pid_file}" '
+            "-f -d"
+        )
+
+        base_command = f'cd "{self._working_dir}"; {dhcpd_command}'
+        job_str = f'{base_command} > "{self._stdio_log_file}" 2>&1'
         self._runner.run_async(job_str)
 
         try:
-            self._wait_for_process(timeout=timeout)
-            self._wait_for_server(timeout=timeout)
+            self._wait_for_process(timeout=timeout_sec)
+            self._wait_for_server(timeout=timeout_sec)
         except:
             self._log.warn("Failed to start DHCP server.")
-            self._log.info("DHCP configuration:\n" +
-                           config.render_config_file() + "\n")
+            self._log.info("DHCP configuration:\n" + config.render_config_file() + "\n")
             self._log.info("DHCP logs:\n" + self.get_logs() + "\n")
             self.stop()
             raise
@@ -116,30 +122,13 @@
         """
         return self._shell.is_alive(self._identifier)
 
-    def get_logs(self):
+    def get_logs(self) -> str:
         """Pulls the log files from where dhcp server is running.
 
         Returns:
             A string of the dhcp server logs.
         """
-        try:
-            # Try reading the PID file. This will fail if the server failed to
-            # start.
-            pid = self._shell.read_file(self._pid_file)
-            # `dhcpd` logs to the syslog, where its messages are interspersed
-            # with all other programs that use the syslog. Log lines contain
-            # `dhcpd[<pid>]`, which we can search for to extract all the logs
-            # from this particular dhcpd instance.
-            # The logs are preferable to the stdio output, since they contain
-            # a superset of the information from stdio, including leases
-            # that the server provides.
-            return self._shell.run(
-                f"grep dhcpd.{pid} /var/log/messages").stdout
-        except Exception:
-            self._log.info(
-                "Failed to read logs from syslog (likely because the server " +
-                "failed to start). Falling back to stdio output.")
-            return self._shell.read_file(self._stdio_log_file)
+        return self._shell.read_file(self._stdio_log_file)
 
     def _wait_for_process(self, timeout=60):
         """Waits for the process to come up.
@@ -168,7 +157,8 @@
         start_time = time.time()
         while time.time() - start_time < timeout:
             success = self._shell.search_file(
-                'Wrote [0-9]* leases to leases file', self._stdio_log_file)
+                "Wrote [0-9]* leases to leases file", self._stdio_log_file
+            )
             if success:
                 return
 
@@ -194,14 +184,16 @@
         is_dead = not self.is_alive()
 
         no_interface = self._shell.search_file(
-            'Not configured to listen on any interfaces', self._stdio_log_file)
+            "Not configured to listen on any interfaces", self._stdio_log_file
+        )
         if no_interface:
             raise NoInterfaceError(
-                'Dhcp does not contain a subnet for any of the networks the'
-                ' current interfaces are on.')
+                "Dhcp does not contain a subnet for any of the networks the"
+                " current interfaces are on."
+            )
 
         if should_be_up and is_dead:
-            raise Error('Dhcp server failed to start.', self)
+            raise Error("Dhcp server failed to start.", self)
 
     def _write_configs(self, config):
         """Writes the configs to the dhcp server config file."""
diff --git a/src/antlion/controllers/ap_lib/extended_capabilities.py b/src/antlion/controllers/ap_lib/extended_capabilities.py
index c7c8ade..82029cc 100644
--- a/src/antlion/controllers/ap_lib/extended_capabilities.py
+++ b/src/antlion/controllers/ap_lib/extended_capabilities.py
@@ -31,6 +31,7 @@
     has the value of its offset; comments indicate capabilities that use
     multiple bits.
     """
+
     TWENTY_FORTY_BSS_COEXISTENCE_MANAGEMENT_SUPPORT = 0
     GLK = 1
     EXTENDED_CHANNEL_SWITCHING = 2
@@ -161,11 +162,12 @@
             at this time.
         """
         if ext_cap in [
-                ExtendedCapability.SERVICE_INTERVAL_GRANULARITY,
-                ExtendedCapability.MAX_NUMBER_OF_MSDUS_IN_A_MSDU
+            ExtendedCapability.SERVICE_INTERVAL_GRANULARITY,
+            ExtendedCapability.MAX_NUMBER_OF_MSDUS_IN_A_MSDU,
         ]:
             raise NotImplementedError(
-                f'{ext_cap.name} not implemented yet by {__class__}')
+                f"{ext_cap.name} not implemented yet by {__class__}"
+            )
         byte_offset, bit_offset = _offsets(ext_cap)
         if len(self._ext_cap) > byte_offset:
             # Use bit_offset to derive a mask that will check the correct bit.
@@ -179,8 +181,7 @@
 
     @property
     def proxy_arp_service(self) -> bool:
-        return self._capability_advertised(
-            ExtendedCapability.PROXY_ARP_SERVICE)
+        return self._capability_advertised(ExtendedCapability.PROXY_ARP_SERVICE)
 
     @property
     def utc_tsf_offset(self) -> bool:
diff --git a/src/antlion/controllers/ap_lib/hostapd.py b/src/antlion/controllers/ap_lib/hostapd.py
index de93ea4..b3f780d 100644
--- a/src/antlion/controllers/ap_lib/hostapd.py
+++ b/src/antlion/controllers/ap_lib/hostapd.py
@@ -17,15 +17,20 @@
 import logging
 import re
 import time
-from typing import Set
 
-from antlion.controllers.ap_lib import hostapd_config
+from typing import Any, Dict, Optional, Set
+
 from antlion.controllers.ap_lib import hostapd_constants
 from antlion.controllers.ap_lib.extended_capabilities import ExtendedCapabilities
-from antlion.controllers.ap_lib.wireless_network_management import BssTransitionManagementRequest
+from antlion.controllers.ap_lib.wireless_network_management import (
+    BssTransitionManagementRequest,
+)
 from antlion.controllers.utils_lib.commands import shell
 from antlion.libs.proc.job import Result
 
+PROGRAM_FILE = "/usr/sbin/hostapd"
+CLI_PROGRAM_FILE = "/usr/bin/hostapd_cli"
+
 
 class Error(Exception):
     """An error caused by hostapd."""
@@ -38,15 +43,12 @@
         config: The hostapd configuration that is being used.
     """
 
-    PROGRAM_FILE = '/usr/sbin/hostapd'
-    CLI_PROGRAM_FILE = '/usr/bin/hostapd_cli'
-
-    def __init__(self, runner, interface, working_dir='/tmp'):
+    def __init__(self, runner: Any, interface: str, working_dir: str = "/tmp") -> None:
         """
         Args:
             runner: Object that has run_async and run methods for executing
                     shell commands (e.g. connection.SshConnection)
-            interface: string, The name of the interface to use (eg. wlan0).
+            interface: The name of the interface to use (eg. wlan0).
             working_dir: The directory to work out of.
         """
         self._runner = runner
@@ -54,12 +56,17 @@
         self._working_dir = working_dir
         self.config = None
         self._shell = shell.ShellCommand(runner, working_dir)
-        self._log_file = 'hostapd-%s.log' % self._interface
-        self._ctrl_file = 'hostapd-%s.ctrl' % self._interface
-        self._config_file = 'hostapd-%s.conf' % self._interface
-        self._identifier = '%s.*%s' % (self.PROGRAM_FILE, self._config_file)
+        self._log_file = f"hostapd-{self._interface}.log"
+        self._ctrl_file = f"hostapd-{self._interface}.ctrl"
+        self._config_file = f"hostapd-{self._interface}.conf"
+        self._identifier = f"{PROGRAM_FILE}.*{self._config_file}"
 
-    def start(self, config, timeout=60, additional_parameters=None):
+    def start(
+        self,
+        config: Any,
+        timeout: int = 60,
+        additional_parameters: Optional[Dict[str, Any]] = None,
+    ) -> None:
         """Starts hostapd
 
         Starts the hostapd daemon and runs it in the background.
@@ -89,11 +96,9 @@
         self._shell.delete_file(self._config_file)
         self._write_configs(additional_parameters=additional_parameters)
 
-        hostapd_command = '%s -dd -t "%s"' % (self.PROGRAM_FILE,
-                                              self._config_file)
-        base_command = 'cd "%s"; %s' % (self._working_dir, hostapd_command)
-        job_str = 'rfkill unblock all; %s > "%s" 2>&1' %\
-                  (base_command, self._log_file)
+        hostapd_command = f'{PROGRAM_FILE} -dd -t "{self._config_file}"'
+        base_command = f'cd "{self._working_dir}"; {hostapd_command}'
+        job_str = f'rfkill unblock all; {base_command} > "{self._log_file}" 2>&1'
         self._runner.run_async(job_str)
 
         try:
@@ -103,12 +108,12 @@
             self.stop()
             raise
 
-    def stop(self):
+    def stop(self) -> None:
         """Kills the daemon if it is running."""
         if self.is_alive():
             self._shell.kill(self._identifier)
 
-    def channel_switch(self, channel_num):
+    def channel_switch(self, channel_num: int) -> None:
         """Switches to the given channel.
 
         Returns:
@@ -118,26 +123,25 @@
         try:
             channel_freq = hostapd_constants.FREQUENCY_MAP[channel_num]
         except KeyError:
-            raise ValueError('Invalid channel number {}'.format(channel_num))
+            raise ValueError(f"Invalid channel number {channel_num}")
         csa_beacon_count = 10
-        channel_switch_cmd = 'chan_switch {} {}'.format(
-            csa_beacon_count, channel_freq)
-        result = self._run_hostapd_cli_cmd(channel_switch_cmd)
+        channel_switch_cmd = f"chan_switch {csa_beacon_count} {channel_freq}"
+        self._run_hostapd_cli_cmd(channel_switch_cmd)
 
-    def get_current_channel(self):
+    def get_current_channel(self) -> int:
         """Returns the current channel number.
 
         Raises: See _run_hostapd_cli_cmd
         """
-        status_cmd = 'status'
+        status_cmd = "status"
         result = self._run_hostapd_cli_cmd(status_cmd)
-        match = re.search(r'^channel=(\d+)$', result.stdout, re.MULTILINE)
+        match = re.search(r"^channel=(\d+)$", result.stdout, re.MULTILINE)
         if not match:
-            raise Error('Current channel could not be determined')
+            raise Error("Current channel could not be determined")
         try:
             channel = int(match.group(1))
         except ValueError:
-            raise Error('Internal error: current channel could not be parsed')
+            raise Error("Internal error: current channel could not be parsed")
         return channel
 
     def _list_sta(self) -> Result:
@@ -147,7 +151,7 @@
             acts.libs.proc.job.Result containing the results of the command.
         Raises: See _run_hostapd_cli_cmd
         """
-        list_sta_cmd = 'list_sta'
+        list_sta_cmd = "list_sta"
         return self._run_hostapd_cli_cmd(list_sta_cmd)
 
     def get_stas(self) -> Set[str]:
@@ -156,7 +160,7 @@
         stas = set()
         for line in list_sta_result.stdout.splitlines():
             # Each line must be a valid MAC address. Capture it.
-            m = re.match(r'((?:[0-9A-Fa-f]{2}:){5}[0-9A-Fa-f]{2})', line)
+            m = re.match(r"((?:[0-9A-Fa-f]{2}:){5}[0-9A-Fa-f]{2})", line)
             if m:
                 stas.add(m.group(1))
         return stas
@@ -168,11 +172,10 @@
             acts.libs.proc.job.Result containing the results of the command.
         Raises: See _run_hostapd_cli_cmd
         """
-        sta_cmd = 'sta {}'.format(sta_mac)
+        sta_cmd = f"sta {sta_mac}"
         return self._run_hostapd_cli_cmd(sta_cmd)
 
-    def get_sta_extended_capabilities(self,
-                                      sta_mac: str) -> ExtendedCapabilities:
+    def get_sta_extended_capabilities(self, sta_mac: str) -> ExtendedCapabilities:
         """Get extended capabilities for the given STA, as seen by the AP.
 
         Args:
@@ -186,19 +189,18 @@
         # hostapd ext_capab field is a hex encoded string representation of the
         # 802.11 extended capabilities structure, each byte represented by two
         # chars (each byte having format %02x).
-        m = re.search(r'ext_capab=([0-9A-Faf]+)', sta_result.stdout,
-                      re.MULTILINE)
+        m = re.search(r"ext_capab=([0-9A-Fa-f]+)", sta_result.stdout, re.MULTILINE)
         if not m:
-            raise Error('Failed to get ext_capab from STA details')
+            raise Error("Failed to get ext_capab from STA details")
         raw_ext_capab = m.group(1)
         try:
             return ExtendedCapabilities(bytearray.fromhex(raw_ext_capab))
         except ValueError:
-            raise Error(
-                f'ext_capab contains invalid hex string repr {raw_ext_capab}')
+            raise Error(f"ext_capab contains invalid hex string repr {raw_ext_capab}")
 
-    def _bss_tm_req(self, client_mac: str,
-                    request: BssTransitionManagementRequest) -> Result:
+    def _bss_tm_req(
+        self, client_mac: str, request: BssTransitionManagementRequest
+    ) -> Result:
         """Send a hostapd BSS Transition Management request command to a STA.
 
         Args:
@@ -208,22 +210,22 @@
             acts.libs.proc.job.Result containing the results of the command.
         Raises: See _run_hostapd_cli_cmd
         """
-        bss_tm_req_cmd = f'bss_tm_req {client_mac}'
+        bss_tm_req_cmd = f"bss_tm_req {client_mac}"
 
         if request.abridged:
-            bss_tm_req_cmd += ' abridged=1'
+            bss_tm_req_cmd += " abridged=1"
         if request.bss_termination_included and request.bss_termination_duration:
-            bss_tm_req_cmd += f' bss_term={request.bss_termination_duration.duration}'
+            bss_tm_req_cmd += f" bss_term={request.bss_termination_duration.duration}"
         if request.disassociation_imminent:
-            bss_tm_req_cmd += ' disassoc_imminent=1'
+            bss_tm_req_cmd += " disassoc_imminent=1"
         if request.disassociation_timer is not None:
-            bss_tm_req_cmd += f' disassoc_timer={request.disassociation_timer}'
+            bss_tm_req_cmd += f" disassoc_timer={request.disassociation_timer}"
         if request.preferred_candidate_list_included:
-            bss_tm_req_cmd += ' pref=1'
+            bss_tm_req_cmd += " pref=1"
         if request.session_information_url:
-            bss_tm_req_cmd += f' url={request.session_information_url}'
+            bss_tm_req_cmd += f" url={request.session_information_url}"
         if request.validity_interval:
-            bss_tm_req_cmd += f' valid_int={request.validity_interval}'
+            bss_tm_req_cmd += f" valid_int={request.validity_interval}"
 
         # neighbor= can appear multiple times, so it requires special handling.
         for neighbor in request.candidate_list:
@@ -232,13 +234,15 @@
             op_class = neighbor.operating_class
             chan_num = neighbor.channel_number
             phy_type = int(neighbor.phy_type)
-            bss_tm_req_cmd += f' neighbor={bssid},{bssid_info},{op_class},{chan_num},{phy_type}'
+            bss_tm_req_cmd += (
+                f" neighbor={bssid},{bssid_info},{op_class},{chan_num},{phy_type}"
+            )
 
         return self._run_hostapd_cli_cmd(bss_tm_req_cmd)
 
     def send_bss_transition_management_req(
-            self, sta_mac: str,
-            request: BssTransitionManagementRequest) -> Result:
+        self, sta_mac: str, request: BssTransitionManagementRequest
+    ) -> Result:
         """Send a BSS Transition Management request to an associated STA.
 
         Args:
@@ -250,14 +254,14 @@
         """
         return self._bss_tm_req(sta_mac, request)
 
-    def is_alive(self):
+    def is_alive(self) -> bool:
         """
         Returns:
             True if the daemon is running.
         """
         return self._shell.is_alive(self._identifier)
 
-    def pull_logs(self):
+    def pull_logs(self) -> str:
         """Pulls the log files from where hostapd is running.
 
         Returns:
@@ -266,7 +270,7 @@
         # TODO: Auto pulling of logs when stop is called.
         return self._shell.read_file(self._log_file)
 
-    def _run_hostapd_cli_cmd(self, cmd):
+    def _run_hostapd_cli_cmd(self, cmd: str) -> Result:
         """Run the given hostapd_cli command.
 
         Runs the command, waits for the output (up to default timeout), and
@@ -283,12 +287,12 @@
             antlion.controllers.utils_lib.ssh.connection.CommandError: Ssh worked,
                 but the command had an error executing.
         """
-        hostapd_cli_job = 'cd {}; {} -p {} {}'.format(self._working_dir,
-                                                      self.CLI_PROGRAM_FILE,
-                                                      self._ctrl_file, cmd)
+        hostapd_cli_job = (
+            f"cd {self._working_dir}; {CLI_PROGRAM_FILE} -p {self._ctrl_file} {cmd}"
+        )
         return self._runner.run(hostapd_cli_job)
 
-    def _wait_for_process(self, timeout=60):
+    def _wait_for_process(self, timeout: int = 60) -> None:
         """Waits for the process to come up.
 
         Waits until the hostapd process is found running, or there is
@@ -302,7 +306,7 @@
             self._scan_for_errors(False)
             time.sleep(0.1)
 
-    def _wait_for_interface(self, timeout=60):
+    def _wait_for_interface(self, timeout: int = 60) -> None:
         """Waits for hostapd to report that the interface is up.
 
         Waits until hostapd says the interface has been brought up or an
@@ -313,15 +317,14 @@
         start_time = time.time()
         while time.time() - start_time < timeout:
             time.sleep(0.1)
-            success = self._shell.search_file('Setup of interface done',
-                                              self._log_file)
+            success = self._shell.search_file("Setup of interface done", self._log_file)
             if success:
                 return
             self._scan_for_errors(False)
 
         self._scan_for_errors(True)
 
-    def _scan_for_errors(self, should_be_up):
+    def _scan_for_errors(self, should_be_up: bool) -> None:
         """Scans the hostapd log for any errors.
 
         Args:
@@ -335,42 +338,46 @@
         # Store this so that all other errors have priority.
         is_dead = not self.is_alive()
 
-        bad_config = self._shell.search_file('Interface initialization failed',
-                                             self._log_file)
+        bad_config = self._shell.search_file(
+            "Interface initialization failed", self._log_file
+        )
         if bad_config:
-            raise Error('Interface failed to start', self)
+            raise Error("Interface failed to start", self)
 
         bad_config = self._shell.search_file(
-            "Interface %s wasn't started" % self._interface, self._log_file)
+            f"Interface {self._interface} wasn't started", self._log_file
+        )
         if bad_config:
-            raise Error('Interface failed to start', self)
+            raise Error("Interface failed to start", self)
 
         if should_be_up and is_dead:
-            raise Error('Hostapd failed to start', self)
+            raise Error("Hostapd failed to start", self)
 
-    def _write_configs(self, additional_parameters=None):
+    def _write_configs(
+        self, additional_parameters: Optional[Dict[str, Any]] = None
+    ) -> None:
         """Writes the configs to the hostapd config file."""
         self._shell.delete_file(self._config_file)
 
         interface_configs = collections.OrderedDict()
-        interface_configs['interface'] = self._interface
-        interface_configs['ctrl_interface'] = self._ctrl_file
-        pairs = ('%s=%s' % (k, v) for k, v in interface_configs.items())
+        interface_configs["interface"] = self._interface
+        interface_configs["ctrl_interface"] = self._ctrl_file
+        pairs = (f"{k}={v}" for k, v in interface_configs.items())
 
         packaged_configs = self.config.package_configs()
         if additional_parameters:
             packaged_configs.append(additional_parameters)
         for packaged_config in packaged_configs:
-            config_pairs = ('%s=%s' % (k, v)
-                            for k, v in packaged_config.items()
-                            if v is not None)
+            config_pairs = (
+                f"{k}={v}" for k, v in packaged_config.items() if v is not None
+            )
             pairs = itertools.chain(pairs, config_pairs)
 
-        hostapd_conf = '\n'.join(pairs)
+        hostapd_conf = "\n".join(pairs)
 
-        logging.info('Writing %s' % self._config_file)
-        logging.debug('******************Start*******************')
-        logging.debug('\n%s' % hostapd_conf)
-        logging.debug('*******************End********************')
+        logging.info(f"Writing {self._config_file}")
+        logging.debug("******************Start*******************")
+        logging.debug(f"\n{hostapd_conf}")
+        logging.debug("*******************End********************")
 
         self._shell.write_file(self._config_file, hostapd_conf)
diff --git a/src/antlion/controllers/ap_lib/hostapd_ap_preset.py b/src/antlion/controllers/ap_lib/hostapd_ap_preset.py
index 28062f5..3b694c0 100644
--- a/src/antlion/controllers/ap_lib/hostapd_ap_preset.py
+++ b/src/antlion/controllers/ap_lib/hostapd_ap_preset.py
@@ -12,58 +12,58 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from typing import FrozenSet
+from typing import Any, FrozenSet, List, Optional
 
 from antlion import utils
-
-import antlion.controllers.ap_lib.third_party_ap_profiles.actiontec as actiontec
-import antlion.controllers.ap_lib.third_party_ap_profiles.asus as asus
-import antlion.controllers.ap_lib.third_party_ap_profiles.belkin as belkin
-import antlion.controllers.ap_lib.third_party_ap_profiles.linksys as linksys
-import antlion.controllers.ap_lib.third_party_ap_profiles.netgear as netgear
-import antlion.controllers.ap_lib.third_party_ap_profiles.securifi as securifi
-import antlion.controllers.ap_lib.third_party_ap_profiles.tplink as tplink
-
-from antlion.controllers.ap_lib import hostapd_config
-from antlion.controllers.ap_lib import hostapd_constants
-from antlion.controllers.ap_lib import hostapd_utils
+from antlion.controllers.ap_lib import hostapd_config, hostapd_constants, hostapd_utils
+from antlion.controllers.ap_lib.hostapd_security import Security
+from antlion.controllers.ap_lib.third_party_ap_profiles import (
+    actiontec,
+    asus,
+    belkin,
+    linksys,
+    netgear,
+    securifi,
+    tplink,
+)
 
 
-def _get_or_default(var, default_value):
+def _get_or_default(var: Optional[Any], default_value: Any) -> Any:
     """Check variable and return non-null value.
 
-   Args:
-        var: Any variable.
-        default_value: Value to return if the var is None.
+    Args:
+         var: Any variable.
+         default_value: Value to return if the var is None.
 
-   Returns:
-        Variable value if not None, default value otherwise.
+    Returns:
+         Variable value if not None, default value otherwise.
     """
     return var if var is not None else default_value
 
 
 def create_ap_preset(
-        profile_name='whirlwind',
-        iface_wlan_2g=None,
-        iface_wlan_5g=None,
-        channel=None,
-        mode=None,
-        frequency=None,
-        security=None,
-        pmf_support=None,
-        ssid=None,
-        hidden=None,
-        dtim_period=None,
-        frag_threshold=None,
-        rts_threshold=None,
-        force_wmm=None,
-        beacon_interval=None,
-        short_preamble=None,
-        n_capabilities=None,
-        ac_capabilities=None,
-        vht_bandwidth=None,
-        wnm_features: FrozenSet[hostapd_constants.WnmFeature] = frozenset(),
-        bss_settings=[]):
+    profile_name: str = "whirlwind",
+    iface_wlan_2g: Optional[str] = None,
+    iface_wlan_5g: Optional[str] = None,
+    channel: Optional[int] = None,
+    mode: Optional[str] = None,
+    frequency: Optional[int] = None,
+    security: Optional[Security] = None,
+    pmf_support: Optional[int] = None,
+    ssid: Optional[str] = None,
+    hidden: Optional[bool] = None,
+    dtim_period: Optional[int] = None,
+    frag_threshold: Optional[int] = None,
+    rts_threshold: Optional[int] = None,
+    force_wmm: Optional[bool] = None,
+    beacon_interval: Optional[int] = None,
+    short_preamble: Optional[bool] = None,
+    n_capabilities: Optional[List[Any]] = None,
+    ac_capabilities: Optional[List[Any]] = None,
+    vht_bandwidth: Optional[int] = None,
+    wnm_features: FrozenSet[hostapd_constants.WnmFeature] = frozenset(),
+    bss_settings: List[Any] = [],
+):
     """AP preset config generator.  This a wrapper for hostapd_config but
        but supplies the default settings for the preset that is selected.
 
@@ -77,8 +77,8 @@
         channel: int, channel number.
         dtim: int, DTIM value of the AP, default is 2.
         frequency: int, frequency of channel.
-        security: Security, the secuirty settings to use.
-        ssid: string, The name of the ssid to brodcast.
+        security: The security settings to use.
+        ssid: string, The name of the ssid to broadcast.
         pmf_support: int, whether pmf is disabled, enabled, or required
         vht_bandwidth: VHT bandwidth for 11ac operation.
         bss_settings: The settings for all bss.
@@ -99,19 +99,17 @@
     """
 
     # Verify interfaces
-    hostapd_utils.verify_interface(iface_wlan_2g,
-                                   hostapd_constants.INTERFACE_2G_LIST)
-    hostapd_utils.verify_interface(iface_wlan_5g,
-                                   hostapd_constants.INTERFACE_5G_LIST)
+    hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST)
+    hostapd_utils.verify_interface(iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST)
 
     if channel:
         frequency = hostapd_config.get_frequency_for_channel(channel)
     elif frequency:
         channel = hostapd_config.get_channel_for_frequency(frequency)
     else:
-        raise ValueError('Specify either frequency or channel.')
+        raise ValueError("Specify either frequency or channel.")
 
-    if profile_name == 'whirlwind':
+    if profile_name == "whirlwind":
         # profile indicates phy mode is 11bgn for 2.4Ghz or 11acn for 5Ghz
         hidden = _get_or_default(hidden, False)
         force_wmm = _get_or_default(force_wmm, True)
@@ -123,14 +121,17 @@
         if frequency < 5000:
             interface = iface_wlan_2g
             mode = _get_or_default(mode, hostapd_constants.MODE_11N_MIXED)
-            n_capabilities = _get_or_default(n_capabilities, [
-                hostapd_constants.N_CAPABILITY_LDPC,
-                hostapd_constants.N_CAPABILITY_SGI20,
-                hostapd_constants.N_CAPABILITY_SGI40,
-                hostapd_constants.N_CAPABILITY_TX_STBC,
-                hostapd_constants.N_CAPABILITY_RX_STBC1,
-                hostapd_constants.N_CAPABILITY_DSSS_CCK_40
-            ])
+            n_capabilities = _get_or_default(
+                n_capabilities,
+                [
+                    hostapd_constants.N_CAPABILITY_LDPC,
+                    hostapd_constants.N_CAPABILITY_SGI20,
+                    hostapd_constants.N_CAPABILITY_SGI40,
+                    hostapd_constants.N_CAPABILITY_TX_STBC,
+                    hostapd_constants.N_CAPABILITY_RX_STBC1,
+                    hostapd_constants.N_CAPABILITY_DSSS_CCK_40,
+                ],
+            )
             config = hostapd_config.HostapdConfig(
                 ssid=ssid,
                 hidden=hidden,
@@ -147,7 +148,8 @@
                 frag_threshold=frag_threshold,
                 rts_threshold=rts_threshold,
                 wnm_features=wnm_features,
-                bss_settings=bss_settings)
+                bss_settings=bss_settings,
+            )
         else:
             interface = iface_wlan_5g
             vht_bandwidth = _get_or_default(vht_bandwidth, 80)
@@ -164,32 +166,42 @@
             if not vht_bandwidth:
                 pass
             elif vht_bandwidth >= 40:
-                n_capabilities = _get_or_default(n_capabilities, [
-                    hostapd_constants.N_CAPABILITY_LDPC, extended_channel,
-                    hostapd_constants.N_CAPABILITY_SGI20,
-                    hostapd_constants.N_CAPABILITY_SGI40,
-                    hostapd_constants.N_CAPABILITY_TX_STBC,
-                    hostapd_constants.N_CAPABILITY_RX_STBC1
-                ])
+                n_capabilities = _get_or_default(
+                    n_capabilities,
+                    [
+                        hostapd_constants.N_CAPABILITY_LDPC,
+                        extended_channel,
+                        hostapd_constants.N_CAPABILITY_SGI20,
+                        hostapd_constants.N_CAPABILITY_SGI40,
+                        hostapd_constants.N_CAPABILITY_TX_STBC,
+                        hostapd_constants.N_CAPABILITY_RX_STBC1,
+                    ],
+                )
             else:
-                n_capabilities = _get_or_default(n_capabilities, [
-                    hostapd_constants.N_CAPABILITY_LDPC,
-                    hostapd_constants.N_CAPABILITY_SGI20,
-                    hostapd_constants.N_CAPABILITY_SGI40,
-                    hostapd_constants.N_CAPABILITY_TX_STBC,
-                    hostapd_constants.N_CAPABILITY_RX_STBC1,
-                    hostapd_constants.N_CAPABILITY_HT20
-                ])
-            ac_capabilities = _get_or_default(ac_capabilities, [
-                hostapd_constants.AC_CAPABILITY_MAX_MPDU_11454,
-                hostapd_constants.AC_CAPABILITY_RXLDPC,
-                hostapd_constants.AC_CAPABILITY_SHORT_GI_80,
-                hostapd_constants.AC_CAPABILITY_TX_STBC_2BY1,
-                hostapd_constants.AC_CAPABILITY_RX_STBC_1,
-                hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7,
-                hostapd_constants.AC_CAPABILITY_RX_ANTENNA_PATTERN,
-                hostapd_constants.AC_CAPABILITY_TX_ANTENNA_PATTERN
-            ])
+                n_capabilities = _get_or_default(
+                    n_capabilities,
+                    [
+                        hostapd_constants.N_CAPABILITY_LDPC,
+                        hostapd_constants.N_CAPABILITY_SGI20,
+                        hostapd_constants.N_CAPABILITY_SGI40,
+                        hostapd_constants.N_CAPABILITY_TX_STBC,
+                        hostapd_constants.N_CAPABILITY_RX_STBC1,
+                        hostapd_constants.N_CAPABILITY_HT20,
+                    ],
+                )
+            ac_capabilities = _get_or_default(
+                ac_capabilities,
+                [
+                    hostapd_constants.AC_CAPABILITY_MAX_MPDU_11454,
+                    hostapd_constants.AC_CAPABILITY_RXLDPC,
+                    hostapd_constants.AC_CAPABILITY_SHORT_GI_80,
+                    hostapd_constants.AC_CAPABILITY_TX_STBC_2BY1,
+                    hostapd_constants.AC_CAPABILITY_RX_STBC_1,
+                    hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7,
+                    hostapd_constants.AC_CAPABILITY_RX_ANTENNA_PATTERN,
+                    hostapd_constants.AC_CAPABILITY_TX_ANTENNA_PATTERN,
+                ],
+            )
             config = hostapd_config.HostapdConfig(
                 ssid=ssid,
                 hidden=hidden,
@@ -207,56 +219,61 @@
                 rts_threshold=rts_threshold,
                 n_capabilities=n_capabilities,
                 ac_capabilities=ac_capabilities,
-                bss_settings=bss_settings)
-    elif profile_name == 'whirlwind_11ab_legacy':
+                bss_settings=bss_settings,
+            )
+    elif profile_name == "whirlwind_11ab_legacy":
         if frequency < 5000:
             mode = hostapd_constants.MODE_11B
         else:
             mode = hostapd_constants.MODE_11A
 
-        config = create_ap_preset(iface_wlan_2g=iface_wlan_2g,
-                                  iface_wlan_5g=iface_wlan_5g,
-                                  ssid=ssid,
-                                  channel=channel,
-                                  mode=mode,
-                                  security=security,
-                                  pmf_support=pmf_support,
-                                  hidden=hidden,
-                                  force_wmm=force_wmm,
-                                  beacon_interval=beacon_interval,
-                                  short_preamble=short_preamble,
-                                  dtim_period=dtim_period,
-                                  rts_threshold=rts_threshold,
-                                  frag_threshold=frag_threshold,
-                                  n_capabilities=[],
-                                  ac_capabilities=[],
-                                  vht_bandwidth=None,
-                                  wnm_features=wnm_features)
-    elif profile_name == 'whirlwind_11ag_legacy':
+        config = create_ap_preset(
+            iface_wlan_2g=iface_wlan_2g,
+            iface_wlan_5g=iface_wlan_5g,
+            ssid=ssid,
+            channel=channel,
+            mode=mode,
+            security=security,
+            pmf_support=pmf_support,
+            hidden=hidden,
+            force_wmm=force_wmm,
+            beacon_interval=beacon_interval,
+            short_preamble=short_preamble,
+            dtim_period=dtim_period,
+            rts_threshold=rts_threshold,
+            frag_threshold=frag_threshold,
+            n_capabilities=[],
+            ac_capabilities=[],
+            vht_bandwidth=None,
+            wnm_features=wnm_features,
+        )
+    elif profile_name == "whirlwind_11ag_legacy":
         if frequency < 5000:
             mode = hostapd_constants.MODE_11G
         else:
             mode = hostapd_constants.MODE_11A
 
-        config = create_ap_preset(iface_wlan_2g=iface_wlan_2g,
-                                  iface_wlan_5g=iface_wlan_5g,
-                                  ssid=ssid,
-                                  channel=channel,
-                                  mode=mode,
-                                  security=security,
-                                  pmf_support=pmf_support,
-                                  hidden=hidden,
-                                  force_wmm=force_wmm,
-                                  beacon_interval=beacon_interval,
-                                  short_preamble=short_preamble,
-                                  dtim_period=dtim_period,
-                                  rts_threshold=rts_threshold,
-                                  frag_threshold=frag_threshold,
-                                  n_capabilities=[],
-                                  ac_capabilities=[],
-                                  vht_bandwidth=None,
-                                  wnm_features=wnm_features)
-    elif profile_name == 'mistral':
+        config = create_ap_preset(
+            iface_wlan_2g=iface_wlan_2g,
+            iface_wlan_5g=iface_wlan_5g,
+            ssid=ssid,
+            channel=channel,
+            mode=mode,
+            security=security,
+            pmf_support=pmf_support,
+            hidden=hidden,
+            force_wmm=force_wmm,
+            beacon_interval=beacon_interval,
+            short_preamble=short_preamble,
+            dtim_period=dtim_period,
+            rts_threshold=rts_threshold,
+            frag_threshold=frag_threshold,
+            n_capabilities=[],
+            ac_capabilities=[],
+            vht_bandwidth=None,
+            wnm_features=wnm_features,
+        )
+    elif profile_name == "mistral":
         hidden = _get_or_default(hidden, False)
         force_wmm = _get_or_default(force_wmm, True)
         beacon_interval = _get_or_default(beacon_interval, 100)
@@ -268,27 +285,31 @@
         # Google IE
         # Country Code IE ('us' lowercase)
         vendor_elements = {
-            'vendor_elements':
-            'dd0cf4f5e80505ff0000ffffffff'
-            '070a75732024041e95051e00'
+            "vendor_elements": "dd0cf4f5e80505ff0000ffffffff" "070a75732024041e95051e00"
         }
-        default_configs = {'bridge': 'br-lan', 'iapp_interface': 'br-lan'}
+        default_configs = {"bridge": "br-lan", "iapp_interface": "br-lan"}
 
         if frequency < 5000:
             interface = iface_wlan_2g
             mode = _get_or_default(mode, hostapd_constants.MODE_11N_MIXED)
-            n_capabilities = _get_or_default(n_capabilities, [
-                hostapd_constants.N_CAPABILITY_LDPC,
-                hostapd_constants.N_CAPABILITY_SGI20,
-                hostapd_constants.N_CAPABILITY_SGI40,
-                hostapd_constants.N_CAPABILITY_TX_STBC,
-                hostapd_constants.N_CAPABILITY_RX_STBC1,
-                hostapd_constants.N_CAPABILITY_DSSS_CCK_40
-            ])
+            n_capabilities = _get_or_default(
+                n_capabilities,
+                [
+                    hostapd_constants.N_CAPABILITY_LDPC,
+                    hostapd_constants.N_CAPABILITY_SGI20,
+                    hostapd_constants.N_CAPABILITY_SGI40,
+                    hostapd_constants.N_CAPABILITY_TX_STBC,
+                    hostapd_constants.N_CAPABILITY_RX_STBC1,
+                    hostapd_constants.N_CAPABILITY_DSSS_CCK_40,
+                ],
+            )
 
             additional_params = utils.merge_dicts(
-                vendor_elements, hostapd_constants.ENABLE_RRM_BEACON_REPORT,
-                hostapd_constants.ENABLE_RRM_NEIGHBOR_REPORT, default_configs)
+                vendor_elements,
+                hostapd_constants.ENABLE_RRM_BEACON_REPORT,
+                hostapd_constants.ENABLE_RRM_NEIGHBOR_REPORT,
+                default_configs,
+            )
             config = hostapd_config.HostapdConfig(
                 ssid=ssid,
                 hidden=hidden,
@@ -307,7 +328,8 @@
                 wnm_features=wnm_features,
                 bss_settings=bss_settings,
                 additional_parameters=additional_params,
-                set_ap_defaults_profile=profile_name)
+                set_ap_defaults_profile=profile_name,
+            )
         else:
             interface = iface_wlan_5g
             vht_bandwidth = _get_or_default(vht_bandwidth, 80)
@@ -321,41 +343,54 @@
                 mode = hostapd_constants.MODE_11N_MIXED
                 extended_channel = hostapd_constants.N_CAPABILITY_HT20
             if vht_bandwidth >= 40:
-                n_capabilities = _get_or_default(n_capabilities, [
-                    hostapd_constants.N_CAPABILITY_LDPC, extended_channel,
-                    hostapd_constants.N_CAPABILITY_SGI20,
-                    hostapd_constants.N_CAPABILITY_SGI40,
-                    hostapd_constants.N_CAPABILITY_TX_STBC,
-                    hostapd_constants.N_CAPABILITY_RX_STBC1
-                ])
+                n_capabilities = _get_or_default(
+                    n_capabilities,
+                    [
+                        hostapd_constants.N_CAPABILITY_LDPC,
+                        extended_channel,
+                        hostapd_constants.N_CAPABILITY_SGI20,
+                        hostapd_constants.N_CAPABILITY_SGI40,
+                        hostapd_constants.N_CAPABILITY_TX_STBC,
+                        hostapd_constants.N_CAPABILITY_RX_STBC1,
+                    ],
+                )
             else:
-                n_capabilities = _get_or_default(n_capabilities, [
-                    hostapd_constants.N_CAPABILITY_LDPC,
-                    hostapd_constants.N_CAPABILITY_SGI20,
-                    hostapd_constants.N_CAPABILITY_SGI40,
-                    hostapd_constants.N_CAPABILITY_TX_STBC,
-                    hostapd_constants.N_CAPABILITY_RX_STBC1,
-                    hostapd_constants.N_CAPABILITY_HT20
-                ])
-            ac_capabilities = _get_or_default(ac_capabilities, [
-                hostapd_constants.AC_CAPABILITY_MAX_MPDU_11454,
-                hostapd_constants.AC_CAPABILITY_RXLDPC,
-                hostapd_constants.AC_CAPABILITY_SHORT_GI_80,
-                hostapd_constants.AC_CAPABILITY_TX_STBC_2BY1,
-                hostapd_constants.AC_CAPABILITY_RX_STBC_1,
-                hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7,
-                hostapd_constants.AC_CAPABILITY_RX_ANTENNA_PATTERN,
-                hostapd_constants.AC_CAPABILITY_TX_ANTENNA_PATTERN,
-                hostapd_constants.AC_CAPABILITY_SU_BEAMFORMER,
-                hostapd_constants.AC_CAPABILITY_SU_BEAMFORMEE,
-                hostapd_constants.AC_CAPABILITY_MU_BEAMFORMER,
-                hostapd_constants.AC_CAPABILITY_SOUNDING_DIMENSION_4,
-                hostapd_constants.AC_CAPABILITY_BF_ANTENNA_4
-            ])
+                n_capabilities = _get_or_default(
+                    n_capabilities,
+                    [
+                        hostapd_constants.N_CAPABILITY_LDPC,
+                        hostapd_constants.N_CAPABILITY_SGI20,
+                        hostapd_constants.N_CAPABILITY_SGI40,
+                        hostapd_constants.N_CAPABILITY_TX_STBC,
+                        hostapd_constants.N_CAPABILITY_RX_STBC1,
+                        hostapd_constants.N_CAPABILITY_HT20,
+                    ],
+                )
+            ac_capabilities = _get_or_default(
+                ac_capabilities,
+                [
+                    hostapd_constants.AC_CAPABILITY_MAX_MPDU_11454,
+                    hostapd_constants.AC_CAPABILITY_RXLDPC,
+                    hostapd_constants.AC_CAPABILITY_SHORT_GI_80,
+                    hostapd_constants.AC_CAPABILITY_TX_STBC_2BY1,
+                    hostapd_constants.AC_CAPABILITY_RX_STBC_1,
+                    hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7,
+                    hostapd_constants.AC_CAPABILITY_RX_ANTENNA_PATTERN,
+                    hostapd_constants.AC_CAPABILITY_TX_ANTENNA_PATTERN,
+                    hostapd_constants.AC_CAPABILITY_SU_BEAMFORMER,
+                    hostapd_constants.AC_CAPABILITY_SU_BEAMFORMEE,
+                    hostapd_constants.AC_CAPABILITY_MU_BEAMFORMER,
+                    hostapd_constants.AC_CAPABILITY_SOUNDING_DIMENSION_4,
+                    hostapd_constants.AC_CAPABILITY_BF_ANTENNA_4,
+                ],
+            )
 
             additional_params = utils.merge_dicts(
-                vendor_elements, hostapd_constants.ENABLE_RRM_BEACON_REPORT,
-                hostapd_constants.ENABLE_RRM_NEIGHBOR_REPORT, default_configs)
+                vendor_elements,
+                hostapd_constants.ENABLE_RRM_BEACON_REPORT,
+                hostapd_constants.ENABLE_RRM_NEIGHBOR_REPORT,
+                default_configs,
+            )
             config = hostapd_config.HostapdConfig(
                 ssid=ssid,
                 hidden=hidden,
@@ -376,111 +411,133 @@
                 wnm_features=wnm_features,
                 bss_settings=bss_settings,
                 additional_parameters=additional_params,
-                set_ap_defaults_profile=profile_name)
-    elif profile_name == 'actiontec_pk5000':
-        config = actiontec.actiontec_pk5000(iface_wlan_2g=iface_wlan_2g,
-                                            channel=channel,
-                                            ssid=ssid,
-                                            security=security)
-    elif profile_name == 'actiontec_mi424wr':
-        config = actiontec.actiontec_mi424wr(iface_wlan_2g=iface_wlan_2g,
-                                             channel=channel,
-                                             ssid=ssid,
-                                             security=security)
-    elif profile_name == 'asus_rtac66u':
-        config = asus.asus_rtac66u(iface_wlan_2g=iface_wlan_2g,
-                                   iface_wlan_5g=iface_wlan_5g,
-                                   channel=channel,
-                                   ssid=ssid,
-                                   security=security)
-    elif profile_name == 'asus_rtac86u':
-        config = asus.asus_rtac86u(iface_wlan_2g=iface_wlan_2g,
-                                   iface_wlan_5g=iface_wlan_5g,
-                                   channel=channel,
-                                   ssid=ssid,
-                                   security=security)
-    elif profile_name == 'asus_rtac5300':
-        config = asus.asus_rtac5300(iface_wlan_2g=iface_wlan_2g,
-                                    iface_wlan_5g=iface_wlan_5g,
-                                    channel=channel,
-                                    ssid=ssid,
-                                    security=security)
-    elif profile_name == 'asus_rtn56u':
-        config = asus.asus_rtn56u(iface_wlan_2g=iface_wlan_2g,
-                                  iface_wlan_5g=iface_wlan_5g,
-                                  channel=channel,
-                                  ssid=ssid,
-                                  security=security)
-    elif profile_name == 'asus_rtn66u':
-        config = asus.asus_rtn66u(iface_wlan_2g=iface_wlan_2g,
-                                  iface_wlan_5g=iface_wlan_5g,
-                                  channel=channel,
-                                  ssid=ssid,
-                                  security=security)
-    elif profile_name == 'belkin_f9k1001v5':
-        config = belkin.belkin_f9k1001v5(iface_wlan_2g=iface_wlan_2g,
-                                         channel=channel,
-                                         ssid=ssid,
-                                         security=security)
-    elif profile_name == 'linksys_ea4500':
-        config = linksys.linksys_ea4500(iface_wlan_2g=iface_wlan_2g,
-                                        iface_wlan_5g=iface_wlan_5g,
-                                        channel=channel,
-                                        ssid=ssid,
-                                        security=security)
-    elif profile_name == 'linksys_ea9500':
-        config = linksys.linksys_ea9500(iface_wlan_2g=iface_wlan_2g,
-                                        iface_wlan_5g=iface_wlan_5g,
-                                        channel=channel,
-                                        ssid=ssid,
-                                        security=security)
-    elif profile_name == 'linksys_wrt1900acv2':
-        config = linksys.linksys_wrt1900acv2(iface_wlan_2g=iface_wlan_2g,
-                                             iface_wlan_5g=iface_wlan_5g,
-                                             channel=channel,
-                                             ssid=ssid,
-                                             security=security)
-    elif profile_name == 'netgear_r7000':
-        config = netgear.netgear_r7000(iface_wlan_2g=iface_wlan_2g,
-                                       iface_wlan_5g=iface_wlan_5g,
-                                       channel=channel,
-                                       ssid=ssid,
-                                       security=security)
-    elif profile_name == 'netgear_wndr3400':
-        config = netgear.netgear_wndr3400(iface_wlan_2g=iface_wlan_2g,
-                                          iface_wlan_5g=iface_wlan_5g,
-                                          channel=channel,
-                                          ssid=ssid,
-                                          security=security)
-    elif profile_name == 'securifi_almond':
-        config = securifi.securifi_almond(iface_wlan_2g=iface_wlan_2g,
-                                          channel=channel,
-                                          ssid=ssid,
-                                          security=security)
-    elif profile_name == 'tplink_archerc5':
-        config = tplink.tplink_archerc5(iface_wlan_2g=iface_wlan_2g,
-                                        iface_wlan_5g=iface_wlan_5g,
-                                        channel=channel,
-                                        ssid=ssid,
-                                        security=security)
-    elif profile_name == 'tplink_archerc7':
-        config = tplink.tplink_archerc7(iface_wlan_2g=iface_wlan_2g,
-                                        iface_wlan_5g=iface_wlan_5g,
-                                        channel=channel,
-                                        ssid=ssid,
-                                        security=security)
-    elif profile_name == 'tplink_c1200':
-        config = tplink.tplink_c1200(iface_wlan_2g=iface_wlan_2g,
-                                     iface_wlan_5g=iface_wlan_5g,
-                                     channel=channel,
-                                     ssid=ssid,
-                                     security=security)
-    elif profile_name == 'tplink_tlwr940n':
-        config = tplink.tplink_tlwr940n(iface_wlan_2g=iface_wlan_2g,
-                                        channel=channel,
-                                        ssid=ssid,
-                                        security=security)
+                set_ap_defaults_profile=profile_name,
+            )
+    elif profile_name == "actiontec_pk5000":
+        config = actiontec.actiontec_pk5000(
+            iface_wlan_2g=iface_wlan_2g, channel=channel, ssid=ssid, security=security
+        )
+    elif profile_name == "actiontec_mi424wr":
+        config = actiontec.actiontec_mi424wr(
+            iface_wlan_2g=iface_wlan_2g, channel=channel, ssid=ssid, security=security
+        )
+    elif profile_name == "asus_rtac66u":
+        config = asus.asus_rtac66u(
+            iface_wlan_2g=iface_wlan_2g,
+            iface_wlan_5g=iface_wlan_5g,
+            channel=channel,
+            ssid=ssid,
+            security=security,
+        )
+    elif profile_name == "asus_rtac86u":
+        config = asus.asus_rtac86u(
+            iface_wlan_2g=iface_wlan_2g,
+            iface_wlan_5g=iface_wlan_5g,
+            channel=channel,
+            ssid=ssid,
+            security=security,
+        )
+    elif profile_name == "asus_rtac5300":
+        config = asus.asus_rtac5300(
+            iface_wlan_2g=iface_wlan_2g,
+            iface_wlan_5g=iface_wlan_5g,
+            channel=channel,
+            ssid=ssid,
+            security=security,
+        )
+    elif profile_name == "asus_rtn56u":
+        config = asus.asus_rtn56u(
+            iface_wlan_2g=iface_wlan_2g,
+            iface_wlan_5g=iface_wlan_5g,
+            channel=channel,
+            ssid=ssid,
+            security=security,
+        )
+    elif profile_name == "asus_rtn66u":
+        config = asus.asus_rtn66u(
+            iface_wlan_2g=iface_wlan_2g,
+            iface_wlan_5g=iface_wlan_5g,
+            channel=channel,
+            ssid=ssid,
+            security=security,
+        )
+    elif profile_name == "belkin_f9k1001v5":
+        config = belkin.belkin_f9k1001v5(
+            iface_wlan_2g=iface_wlan_2g, channel=channel, ssid=ssid, security=security
+        )
+    elif profile_name == "linksys_ea4500":
+        config = linksys.linksys_ea4500(
+            iface_wlan_2g=iface_wlan_2g,
+            iface_wlan_5g=iface_wlan_5g,
+            channel=channel,
+            ssid=ssid,
+            security=security,
+        )
+    elif profile_name == "linksys_ea9500":
+        config = linksys.linksys_ea9500(
+            iface_wlan_2g=iface_wlan_2g,
+            iface_wlan_5g=iface_wlan_5g,
+            channel=channel,
+            ssid=ssid,
+            security=security,
+        )
+    elif profile_name == "linksys_wrt1900acv2":
+        config = linksys.linksys_wrt1900acv2(
+            iface_wlan_2g=iface_wlan_2g,
+            iface_wlan_5g=iface_wlan_5g,
+            channel=channel,
+            ssid=ssid,
+            security=security,
+        )
+    elif profile_name == "netgear_r7000":
+        config = netgear.netgear_r7000(
+            iface_wlan_2g=iface_wlan_2g,
+            iface_wlan_5g=iface_wlan_5g,
+            channel=channel,
+            ssid=ssid,
+            security=security,
+        )
+    elif profile_name == "netgear_wndr3400":
+        config = netgear.netgear_wndr3400(
+            iface_wlan_2g=iface_wlan_2g,
+            iface_wlan_5g=iface_wlan_5g,
+            channel=channel,
+            ssid=ssid,
+            security=security,
+        )
+    elif profile_name == "securifi_almond":
+        config = securifi.securifi_almond(
+            iface_wlan_2g=iface_wlan_2g, channel=channel, ssid=ssid, security=security
+        )
+    elif profile_name == "tplink_archerc5":
+        config = tplink.tplink_archerc5(
+            iface_wlan_2g=iface_wlan_2g,
+            iface_wlan_5g=iface_wlan_5g,
+            channel=channel,
+            ssid=ssid,
+            security=security,
+        )
+    elif profile_name == "tplink_archerc7":
+        config = tplink.tplink_archerc7(
+            iface_wlan_2g=iface_wlan_2g,
+            iface_wlan_5g=iface_wlan_5g,
+            channel=channel,
+            ssid=ssid,
+            security=security,
+        )
+    elif profile_name == "tplink_c1200":
+        config = tplink.tplink_c1200(
+            iface_wlan_2g=iface_wlan_2g,
+            iface_wlan_5g=iface_wlan_5g,
+            channel=channel,
+            ssid=ssid,
+            security=security,
+        )
+    elif profile_name == "tplink_tlwr940n":
+        config = tplink.tplink_tlwr940n(
+            iface_wlan_2g=iface_wlan_2g, channel=channel, ssid=ssid, security=security
+        )
     else:
-        raise ValueError('Invalid ap model specified (%s)' % profile_name)
+        raise ValueError(f"Invalid ap model specified ({profile_name})")
 
     return config
diff --git a/src/antlion/controllers/ap_lib/hostapd_bss_settings.py b/src/antlion/controllers/ap_lib/hostapd_bss_settings.py
index 3d298f5..56a5422 100644
--- a/src/antlion/controllers/ap_lib/hostapd_bss_settings.py
+++ b/src/antlion/controllers/ap_lib/hostapd_bss_settings.py
@@ -37,12 +37,12 @@
     def generate_dict(self):
         """Returns: A dictionary of bss settings."""
         settings = collections.OrderedDict()
-        settings['bss'] = self.name
+        settings["bss"] = self.name
         if self.bssid:
-            settings['bssid'] = self.bssid
+            settings["bssid"] = self.bssid
         if self.ssid:
-            settings['ssid'] = self.ssid
-            settings['ignore_broadcast_ssid'] = 1 if self.hidden else 0
+            settings["ssid"] = self.ssid
+            settings["ignore_broadcast_ssid"] = 1 if self.hidden else 0
 
         if self.security:
             security_settings = self.security.generate_dict()
diff --git a/src/antlion/controllers/ap_lib/hostapd_config.py b/src/antlion/controllers/ap_lib/hostapd_config.py
index abb50c9..a886e04 100644
--- a/src/antlion/controllers/ap_lib/hostapd_config.py
+++ b/src/antlion/controllers/ap_lib/hostapd_config.py
@@ -21,16 +21,24 @@
 
 def ht40_plus_allowed(channel):
     """Returns: True iff HT40+ is enabled for this configuration."""
-    channel_supported = (channel in hostapd_constants.HT40_ALLOW_MAP[
-        hostapd_constants.N_CAPABILITY_HT40_PLUS_CHANNELS])
-    return (channel_supported)
+    channel_supported = (
+        channel
+        in hostapd_constants.HT40_ALLOW_MAP[
+            hostapd_constants.N_CAPABILITY_HT40_PLUS_CHANNELS
+        ]
+    )
+    return channel_supported
 
 
 def ht40_minus_allowed(channel):
     """Returns: True iff HT40- is enabled for this configuration."""
-    channel_supported = (channel in hostapd_constants.HT40_ALLOW_MAP[
-        hostapd_constants.N_CAPABILITY_HT40_MINUS_CHANNELS])
-    return (channel_supported)
+    channel_supported = (
+        channel
+        in hostapd_constants.HT40_ALLOW_MAP[
+            hostapd_constants.N_CAPABILITY_HT40_MINUS_CHANNELS
+        ]
+    )
+    return channel_supported
 
 
 def get_frequency_for_channel(channel):
@@ -43,12 +51,11 @@
         int, frequency in MHz associated with the channel.
 
     """
-    for frequency, channel_iter in \
-        hostapd_constants.CHANNEL_MAP.items():
+    for frequency, channel_iter in hostapd_constants.CHANNEL_MAP.items():
         if channel == channel_iter:
             return frequency
     else:
-        raise ValueError('Unknown channel value: %r.' % channel)
+        raise ValueError("Unknown channel value: %r." % channel)
 
 
 def get_channel_for_frequency(frequency):
@@ -72,61 +79,68 @@
 
     def _get_11ac_center_channel_from_channel(self, channel):
         """Returns the center channel of the selected channel band based
-           on the channel and channel bandwidth provided.
+        on the channel and channel bandwidth provided.
         """
         channel = int(channel)
         center_channel_delta = hostapd_constants.CENTER_CHANNEL_MAP[
-            self._vht_oper_chwidth]['delta']
+            self._vht_oper_chwidth
+        ]["delta"]
 
-        for channel_map in hostapd_constants.CENTER_CHANNEL_MAP[
-                self._vht_oper_chwidth]['channels']:
+        for channel_map in hostapd_constants.CENTER_CHANNEL_MAP[self._vht_oper_chwidth][
+            "channels"
+        ]:
             lower_channel_bound, upper_channel_bound = channel_map
             if lower_channel_bound <= channel <= upper_channel_bound:
                 return lower_channel_bound + center_channel_delta
-        raise ValueError('Invalid channel for {channel_width}.'.format(
-            channel_width=self._vht_oper_chwidth))
+        raise ValueError(
+            "Invalid channel for {channel_width}.".format(
+                channel_width=self._vht_oper_chwidth
+            )
+        )
 
     @property
     def _get_default_config(self):
         """Returns: dict of default options for hostapd."""
-        if self.set_ap_defaults_profile == 'mistral':
-            return collections.OrderedDict([
-                ('logger_syslog', '-1'),
-                ('logger_syslog_level', '0'),
-                # default RTS and frag threshold to ``off''
-                ('rts_threshold', None),
-                ('fragm_threshold', None),
-                ('driver', hostapd_constants.DRIVER_NAME)
-            ])
+        if self.set_ap_defaults_profile == "mistral":
+            return collections.OrderedDict(
+                [
+                    ("logger_syslog", "-1"),
+                    ("logger_syslog_level", "0"),
+                    # default RTS and frag threshold to ``off''
+                    ("rts_threshold", None),
+                    ("fragm_threshold", None),
+                    ("driver", hostapd_constants.DRIVER_NAME),
+                ]
+            )
         else:
-            return collections.OrderedDict([
-                ('logger_syslog', '-1'),
-                ('logger_syslog_level', '0'),
-                # default RTS and frag threshold to ``off''
-                ('rts_threshold', '2347'),
-                ('fragm_threshold', '2346'),
-                ('driver', hostapd_constants.DRIVER_NAME)
-            ])
+            return collections.OrderedDict(
+                [
+                    ("logger_syslog", "-1"),
+                    ("logger_syslog_level", "0"),
+                    # default RTS and frag threshold to ``off''
+                    ("rts_threshold", "2347"),
+                    ("fragm_threshold", "2346"),
+                    ("driver", hostapd_constants.DRIVER_NAME),
+                ]
+            )
 
     @property
     def _hostapd_ht_capabilities(self):
-        """Returns: string suitable for the ht_capab= line in a hostapd config.
-        """
+        """Returns: string suitable for the ht_capab= line in a hostapd config."""
         ret = []
         for cap in hostapd_constants.N_CAPABILITIES_MAPPING.keys():
             if cap in self._n_capabilities:
                 ret.append(hostapd_constants.N_CAPABILITIES_MAPPING[cap])
-        return ''.join(ret)
+        return "".join(ret)
 
     @property
     def _hostapd_vht_capabilities(self):
-        """Returns: string suitable for the vht_capab= line in a hostapd config.
-        """
+        """Returns: string suitable for the vht_capab= line in a hostapd config."""
         ret = []
         for cap in hostapd_constants.AC_CAPABILITIES_MAPPING.keys():
             if cap in self._ac_capabilities:
                 ret.append(hostapd_constants.AC_CAPABILITIES_MAPPING[cap])
-        return ''.join(ret)
+        return "".join(ret)
 
     @property
     def _require_ht(self):
@@ -152,19 +166,23 @@
             if self._frequency > 5000:
                 return hostapd_constants.MODE_11A
             return hostapd_constants.MODE_11G
-        raise ValueError('Invalid mode.')
+        raise ValueError("Invalid mode.")
 
     @property
     def is_11n(self):
         """Returns: True if we're trying to host an 802.11n network."""
-        return self._mode in (hostapd_constants.MODE_11N_MIXED,
-                              hostapd_constants.MODE_11N_PURE)
+        return self._mode in (
+            hostapd_constants.MODE_11N_MIXED,
+            hostapd_constants.MODE_11N_PURE,
+        )
 
     @property
     def is_11ac(self):
         """Returns: True if we're trying to host an 802.11ac network."""
-        return self._mode in (hostapd_constants.MODE_11AC_MIXED,
-                              hostapd_constants.MODE_11AC_PURE)
+        return self._mode in (
+            hostapd_constants.MODE_11AC_MIXED,
+            hostapd_constants.MODE_11AC_PURE,
+        )
 
     @property
     def channel(self):
@@ -203,7 +221,7 @@
 
         """
         if value not in hostapd_constants.CHANNEL_MAP:
-            raise ValueError('Tried to set an invalid frequency: %r.' % value)
+            raise ValueError("Tried to set an invalid frequency: %r." % value)
 
         self._frequency = value
 
@@ -273,12 +291,12 @@
             return None
 
         if ht40_plus_allowed(self.channel):
-            return 'HT40+'
+            return "HT40+"
 
         if ht40_minus_allowed(self.channel):
-            return 'HT40-'
+            return "HT40-"
 
-        return 'HT20'
+        return "HT20"
 
     @property
     def beacon_footer(self):
@@ -311,36 +329,37 @@
     def wnm_features(self, value: FrozenSet[hostapd_constants.WnmFeature]):
         self._wnm_features = value
 
-    def __init__(self,
-                 interface=None,
-                 mode=None,
-                 channel=None,
-                 frequency=None,
-                 n_capabilities=[],
-                 beacon_interval=None,
-                 dtim_period=None,
-                 frag_threshold=None,
-                 rts_threshold=None,
-                 short_preamble=None,
-                 ssid=None,
-                 hidden=False,
-                 security=None,
-                 bssid=None,
-                 force_wmm=None,
-                 pmf_support=None,
-                 obss_interval=None,
-                 vht_channel_width=None,
-                 vht_center_channel=None,
-                 ac_capabilities=[],
-                 beacon_footer='',
-                 spectrum_mgmt_required=None,
-                 scenario_name=None,
-                 min_streams=None,
-                 wnm_features: FrozenSet[
-                     hostapd_constants.WnmFeature] = frozenset(),
-                 bss_settings=[],
-                 additional_parameters={},
-                 set_ap_defaults_profile='whirlwind'):
+    def __init__(
+        self,
+        interface=None,
+        mode=None,
+        channel=None,
+        frequency=None,
+        n_capabilities=[],
+        beacon_interval=None,
+        dtim_period=None,
+        frag_threshold=None,
+        rts_threshold=None,
+        short_preamble=None,
+        ssid=None,
+        hidden=False,
+        security=None,
+        bssid=None,
+        force_wmm=None,
+        pmf_support=None,
+        obss_interval=None,
+        vht_channel_width=None,
+        vht_center_channel=None,
+        ac_capabilities=[],
+        beacon_footer="",
+        spectrum_mgmt_required=None,
+        scenario_name=None,
+        min_streams=None,
+        wnm_features: FrozenSet[hostapd_constants.WnmFeature] = frozenset(),
+        bss_settings=[],
+        additional_parameters={},
+        set_ap_defaults_profile="whirlwind",
+    ):
         """Construct a HostapdConfig.
 
         You may specify channel or frequency, but not both.  Both options
@@ -390,16 +409,16 @@
         self.set_ap_defaults_profile = set_ap_defaults_profile
         self._interface = interface
         if channel is not None and frequency is not None:
-            raise ValueError('Specify either frequency or channel '
-                             'but not both.')
+            raise ValueError("Specify either frequency or channel " "but not both.")
 
         self._wmm_enabled = False
         unknown_caps = [
-            cap for cap in n_capabilities
+            cap
+            for cap in n_capabilities
             if cap not in hostapd_constants.N_CAPABILITIES_MAPPING
         ]
         if unknown_caps:
-            raise ValueError('Unknown capabilities: %r' % unknown_caps)
+            raise ValueError("Unknown capabilities: %r" % unknown_caps)
 
         self._frequency = None
         if channel:
@@ -407,8 +426,8 @@
         elif frequency:
             self.frequency = frequency
         else:
-            raise ValueError('Specify either frequency or channel.')
-        '''
+            raise ValueError("Specify either frequency or channel.")
+        """
         if set_ap_defaults_model:
             ap_default_config = hostapd_ap_default_configs.APDefaultConfig(
                 profile_name=set_ap_defaults_model, frequency=self.frequency)
@@ -422,7 +441,7 @@
                 n_capabilities = ap_default_config.n_capabilities
             if ap_default_config.ac_capabilities:
                 ap_default_config = ap_default_config.ac_capabilities
-        '''
+        """
 
         self._n_capabilities = set(n_capabilities)
         if self._n_capabilities:
@@ -432,8 +451,10 @@
         self._mode = mode
 
         if not self.supports_frequency(self.frequency):
-            raise ValueError('Configured a mode %s that does not support '
-                             'frequency %d' % (self._mode, self.frequency))
+            raise ValueError(
+                "Configured a mode %s that does not support "
+                "frequency %d" % (self._mode, self.frequency)
+            )
 
         self._beacon_interval = beacon_interval
         self._dtim_period = dtim_period
@@ -451,12 +472,17 @@
                 self._wmm_enabled = 0
         # Default PMF Values
         if pmf_support is None:
-            if (self.security and self.security.security_mode_string ==
-                    hostapd_constants.WPA3_STRING):
+            if (
+                self.security
+                and self.security.security_mode_string == hostapd_constants.WPA3_STRING
+            ):
                 # Set PMF required for WP3
                 self._pmf_support = hostapd_constants.PMF_SUPPORT_REQUIRED
-            elif (self.security and self.security.security_mode_string in
-                  hostapd_constants.WPA3_MODE_STRINGS):
+            elif (
+                self.security
+                and self.security.security_mode_string
+                in hostapd_constants.WPA3_MODE_STRINGS
+            ):
                 # Default PMF to enabled for WPA3 mixed modes (can be
                 # overwritten by explicitly provided value)
                 self._pmf_support = hostapd_constants.PMF_SUPPORT_ENABLED
@@ -465,37 +491,40 @@
                 # overwritten by explicitly provided value)
                 self._pmf_support = hostapd_constants.PMF_SUPPORT_DISABLED
         elif pmf_support not in hostapd_constants.PMF_SUPPORT_VALUES:
-            raise ValueError('Invalid value for pmf_support: %r' % pmf_support)
-        elif (pmf_support != hostapd_constants.PMF_SUPPORT_REQUIRED
-              and self.security and self.security.security_mode_string ==
-              hostapd_constants.WPA3_STRING):
-            raise ValueError('PMF support must be required with wpa3.')
+            raise ValueError("Invalid value for pmf_support: %r" % pmf_support)
+        elif (
+            pmf_support != hostapd_constants.PMF_SUPPORT_REQUIRED
+            and self.security
+            and self.security.security_mode_string == hostapd_constants.WPA3_STRING
+        ):
+            raise ValueError("PMF support must be required with wpa3.")
         else:
             self._pmf_support = pmf_support
         self._obss_interval = obss_interval
         if self.is_11ac:
-            if str(vht_channel_width) == '40' or str(
-                    vht_channel_width) == '20':
+            if str(vht_channel_width) == "40" or str(vht_channel_width) == "20":
                 self._vht_oper_chwidth = hostapd_constants.VHT_CHANNEL_WIDTH_40
-            elif str(vht_channel_width) == '80':
+            elif str(vht_channel_width) == "80":
                 self._vht_oper_chwidth = hostapd_constants.VHT_CHANNEL_WIDTH_80
-            elif str(vht_channel_width) == '160':
+            elif str(vht_channel_width) == "160":
                 self._vht_oper_chwidth = hostapd_constants.VHT_CHANNEL_WIDTH_160
-            elif str(vht_channel_width) == '80+80':
+            elif str(vht_channel_width) == "80+80":
                 self._vht_oper_chwidth = hostapd_constants.VHT_CHANNEL_WIDTH_80_80
             elif vht_channel_width is not None:
-                raise ValueError('Invalid channel width')
+                raise ValueError("Invalid channel width")
             else:
                 logging.warning(
-                    'No channel bandwidth specified.  Using 80MHz for 11ac.')
+                    "No channel bandwidth specified.  Using 80MHz for 11ac."
+                )
                 self._vht_oper_chwidth = 1
             if vht_center_channel is not None:
                 self._vht_oper_centr_freq_seg0_idx = vht_center_channel
             elif vht_channel_width == 20:
                 self._vht_oper_centr_freq_seg0_idx = channel
             else:
-                self._vht_oper_centr_freq_seg0_idx = self._get_11ac_center_channel_from_channel(
-                    self.channel)
+                self._vht_oper_centr_freq_seg0_idx = (
+                    self._get_11ac_center_channel_from_channel(self.channel)
+                )
             self._ac_capabilities = set(ac_capabilities)
         self._beacon_footer = beacon_footer
         self._spectrum_mgmt_required = spectrum_mgmt_required
@@ -507,21 +536,34 @@
         self._bss_lookup = collections.OrderedDict()
         for bss in bss_settings:
             if bss.name in self._bss_lookup:
-                raise ValueError('Cannot have multiple bss settings with the'
-                                 ' same name.')
+                raise ValueError(
+                    "Cannot have multiple bss settings with the" " same name."
+                )
             self._bss_lookup[bss.name] = bss
 
     def __repr__(self):
         return (
-            '%s(mode=%r, channel=%r, frequency=%r, '
-            'n_capabilities=%r, beacon_interval=%r, '
-            'dtim_period=%r, frag_threshold=%r, ssid=%r, bssid=%r, '
-            'wmm_enabled=%r, security_config=%r, '
-            'spectrum_mgmt_required=%r)' %
-            (self.__class__.__name__, self._mode, self.channel, self.frequency,
-             self._n_capabilities, self._beacon_interval, self._dtim_period,
-             self._frag_threshold, self._ssid, self._bssid, self._wmm_enabled,
-             self._security, self._spectrum_mgmt_required))
+            "%s(mode=%r, channel=%r, frequency=%r, "
+            "n_capabilities=%r, beacon_interval=%r, "
+            "dtim_period=%r, frag_threshold=%r, ssid=%r, bssid=%r, "
+            "wmm_enabled=%r, security_config=%r, "
+            "spectrum_mgmt_required=%r)"
+            % (
+                self.__class__.__name__,
+                self._mode,
+                self.channel,
+                self.frequency,
+                self._n_capabilities,
+                self._beacon_interval,
+                self._dtim_period,
+                self._frag_threshold,
+                self._ssid,
+                self._bssid,
+                self._wmm_enabled,
+                self._security,
+                self._spectrum_mgmt_required,
+            )
+        )
 
     def supports_channel(self, value):
         """Check whether channel is supported by the current hardware mode.
@@ -546,24 +588,38 @@
         if self._mode == hostapd_constants.MODE_11A and frequency < 5000:
             return False
 
-        if self._mode in (hostapd_constants.MODE_11B,
-                          hostapd_constants.MODE_11G) and frequency > 5000:
+        if (
+            self._mode in (hostapd_constants.MODE_11B, hostapd_constants.MODE_11G)
+            and frequency > 5000
+        ):
             return False
 
         if frequency not in hostapd_constants.CHANNEL_MAP:
             return False
 
         channel = hostapd_constants.CHANNEL_MAP[frequency]
-        supports_plus = (channel in hostapd_constants.HT40_ALLOW_MAP[
-            hostapd_constants.N_CAPABILITY_HT40_PLUS_CHANNELS])
-        supports_minus = (channel in hostapd_constants.HT40_ALLOW_MAP[
-            hostapd_constants.N_CAPABILITY_HT40_MINUS_CHANNELS])
-        if (hostapd_constants.N_CAPABILITY_HT40_PLUS in self._n_capabilities
-                and not supports_plus):
+        supports_plus = (
+            channel
+            in hostapd_constants.HT40_ALLOW_MAP[
+                hostapd_constants.N_CAPABILITY_HT40_PLUS_CHANNELS
+            ]
+        )
+        supports_minus = (
+            channel
+            in hostapd_constants.HT40_ALLOW_MAP[
+                hostapd_constants.N_CAPABILITY_HT40_MINUS_CHANNELS
+            ]
+        )
+        if (
+            hostapd_constants.N_CAPABILITY_HT40_PLUS in self._n_capabilities
+            and not supports_plus
+        ):
             return False
 
-        if (hostapd_constants.N_CAPABILITY_HT40_MINUS in self._n_capabilities
-                and not supports_minus):
+        if (
+            hostapd_constants.N_CAPABILITY_HT40_MINUS in self._n_capabilities
+            and not supports_minus
+        ):
             return False
 
         return True
@@ -575,7 +631,7 @@
             bss: The bss settings to add.
         """
         if bss.name in self._bss_lookup:
-            raise ValueError('A bss with the same name already exists.')
+            raise ValueError("A bss with the same name already exists.")
 
         self._bss_lookup[bss.name] = bss
 
@@ -594,52 +650,51 @@
         conf = self._get_default_config
 
         if self._interface:
-            conf['interface'] = self._interface
+            conf["interface"] = self._interface
         if self._bssid:
-            conf['bssid'] = self._bssid
+            conf["bssid"] = self._bssid
         if self._ssid:
-            conf['ssid'] = self._ssid
-            conf['ignore_broadcast_ssid'] = 1 if self._hidden else 0
-        conf['channel'] = self.channel
-        conf['hw_mode'] = self.hw_mode
+            conf["ssid"] = self._ssid
+            conf["ignore_broadcast_ssid"] = 1 if self._hidden else 0
+        conf["channel"] = self.channel
+        conf["hw_mode"] = self.hw_mode
         if self.is_11n or self.is_11ac:
-            conf['ieee80211n'] = 1
-            conf['ht_capab'] = self._hostapd_ht_capabilities
+            conf["ieee80211n"] = 1
+            conf["ht_capab"] = self._hostapd_ht_capabilities
         if self.is_11ac:
-            conf['ieee80211ac'] = 1
-            conf['vht_oper_chwidth'] = self._vht_oper_chwidth
-            conf['vht_oper_centr_freq_seg0_idx'] = \
-                    self._vht_oper_centr_freq_seg0_idx
-            conf['vht_capab'] = self._hostapd_vht_capabilities
+            conf["ieee80211ac"] = 1
+            conf["vht_oper_chwidth"] = self._vht_oper_chwidth
+            conf["vht_oper_centr_freq_seg0_idx"] = self._vht_oper_centr_freq_seg0_idx
+            conf["vht_capab"] = self._hostapd_vht_capabilities
         if self._wmm_enabled is not None:
-            conf['wmm_enabled'] = self._wmm_enabled
+            conf["wmm_enabled"] = self._wmm_enabled
         if self._require_ht:
-            conf['require_ht'] = 1
+            conf["require_ht"] = 1
         if self._require_vht:
-            conf['require_vht'] = 1
+            conf["require_vht"] = 1
         if self._beacon_interval:
-            conf['beacon_int'] = self._beacon_interval
+            conf["beacon_int"] = self._beacon_interval
         if self._dtim_period:
-            conf['dtim_period'] = self._dtim_period
+            conf["dtim_period"] = self._dtim_period
         if self._frag_threshold:
-            conf['fragm_threshold'] = self._frag_threshold
+            conf["fragm_threshold"] = self._frag_threshold
         if self._rts_threshold:
-            conf['rts_threshold'] = self._rts_threshold
+            conf["rts_threshold"] = self._rts_threshold
         if self._pmf_support:
-            conf['ieee80211w'] = self._pmf_support
+            conf["ieee80211w"] = self._pmf_support
         if self._obss_interval:
-            conf['obss_interval'] = self._obss_interval
+            conf["obss_interval"] = self._obss_interval
         if self._short_preamble:
-            conf['preamble'] = 1
+            conf["preamble"] = 1
         if self._spectrum_mgmt_required:
             # To set spectrum_mgmt_required, we must first set
             # local_pwr_constraint. And to set local_pwr_constraint,
             # we must first set ieee80211d. And to set ieee80211d, ...
             # Point being: order matters here.
-            conf['country_code'] = 'US'  # Required for local_pwr_constraint
-            conf['ieee80211d'] = 1  # Required for local_pwr_constraint
-            conf['local_pwr_constraint'] = 0  # No local constraint
-            conf['spectrum_mgmt_required'] = 1  # Requires local_pwr_constraint
+            conf["country_code"] = "US"  # Required for local_pwr_constraint
+            conf["ieee80211d"] = 1  # Required for local_pwr_constraint
+            conf["local_pwr_constraint"] = 0  # No local constraint
+            conf["spectrum_mgmt_required"] = 1  # Requires local_pwr_constraint
 
         if self._security:
             for k, v in self._security.generate_dict().items():
@@ -659,14 +714,15 @@
             elif wnm_feature == hostapd_constants.WnmFeature.WNM_SLEEP_MODE:
                 conf.update(hostapd_constants.ENABLE_WNM_SLEEP_MODE)
             elif wnm_feature == hostapd_constants.WnmFeature.BSS_TRANSITION_MANAGEMENT:
-                conf.update(
-                    hostapd_constants.ENABLE_WNM_BSS_TRANSITION_MANAGEMENT)
+                conf.update(hostapd_constants.ENABLE_WNM_BSS_TRANSITION_MANAGEMENT)
             elif wnm_feature == hostapd_constants.WnmFeature.PROXY_ARP:
                 conf.update(hostapd_constants.ENABLE_WNM_PROXY_ARP)
-            elif wnm_feature == hostapd_constants.WnmFeature.IPV6_NEIGHBOR_ADVERTISEMENT_MULTICAST_TO_UNICAST:
+            elif (
+                wnm_feature
+                == hostapd_constants.WnmFeature.IPV6_NEIGHBOR_ADVERTISEMENT_MULTICAST_TO_UNICAST
+            ):
                 conf.update(
-                    hostapd_constants.
-                    ENABLE_WNM_IPV6_NEIGHBOR_ADVERTISEMENT_MULTICAST_TO_UNICAST
+                    hostapd_constants.ENABLE_WNM_IPV6_NEIGHBOR_ADVERTISEMENT_MULTICAST_TO_UNICAST
                 )
 
         if self._additional_parameters:
diff --git a/src/antlion/controllers/ap_lib/hostapd_constants.py b/src/antlion/controllers/ap_lib/hostapd_constants.py
index 3a95ffa..ae7ef85 100755
--- a/src/antlion/controllers/ap_lib/hostapd_constants.py
+++ b/src/antlion/controllers/ap_lib/hostapd_constants.py
@@ -18,8 +18,8 @@
 
 from enum import Enum, auto, unique
 
-BAND_2G = '2g'
-BAND_5G = '5g'
+BAND_2G = "2g"
+BAND_5G = "5g"
 CHANNEL_BANDWIDTH_20MHZ = 20
 CHANNEL_BANDWIDTH_40MHZ = 40
 CHANNEL_BANDWIDTH_80MHZ = 80
@@ -34,22 +34,22 @@
 MIN_WPA_PSK_LENGTH = 8
 MAX_WPA_PASSWORD_LENGTH = 63
 WPA_STRICT_REKEY = 1
-WPA_DEFAULT_CIPHER = 'TKIP'
-WPA2_DEFAULT_CIPER = 'CCMP'
+WPA_DEFAULT_CIPHER = "TKIP"
+WPA2_DEFAULT_CIPER = "CCMP"
 WPA_GROUP_KEY_ROTATION_TIME = 600
 WPA_STRICT_REKEY_DEFAULT = True
-WEP_STRING = 'wep'
-WPA_STRING = 'wpa'
-WPA2_STRING = 'wpa2'
-WPA_MIXED_STRING = 'wpa/wpa2'
-WPA3_STRING = 'wpa3'
-WPA2_WPA3_MIXED_STRING = 'wpa2/wpa3'
-WPA_WPA2_WPA3_MIXED_STRING = 'wpa/wpa2/wpa3'
-ENT_STRING = 'ent'
-ENT_KEY_MGMT = 'WPA-EAP'
-WPA_PSK_KEY_MGMT = 'WPA-PSK'
-SAE_KEY_MGMT = 'SAE'
-DUAL_WPA_PSK_SAE_KEY_MGMT = 'WPA-PSK SAE'
+WEP_STRING = "wep"
+WPA_STRING = "wpa"
+WPA2_STRING = "wpa2"
+WPA_MIXED_STRING = "wpa/wpa2"
+WPA3_STRING = "wpa3"
+WPA2_WPA3_MIXED_STRING = "wpa2/wpa3"
+WPA_WPA2_WPA3_MIXED_STRING = "wpa/wpa2/wpa3"
+ENT_STRING = "ent"
+ENT_KEY_MGMT = "WPA-EAP"
+WPA_PSK_KEY_MGMT = "WPA-PSK"
+SAE_KEY_MGMT = "SAE"
+DUAL_WPA_PSK_SAE_KEY_MGMT = "WPA-PSK SAE"
 SECURITY_STRING_TO_SECURITY_MODE_INT = {
     WPA_STRING: WPA1,
     WPA2_STRING: WPA2,
@@ -58,7 +58,7 @@
     WPA2_WPA3_MIXED_STRING: WPA3,
     WPA_WPA2_WPA3_MIXED_STRING: MIXED,
     WEP_STRING: WEP,
-    ENT_STRING: ENT
+    ENT_STRING: ENT,
 }
 SECURITY_STRING_TO_WPA_KEY_MGMT = {
     WPA_STRING: WPA_PSK_KEY_MGMT,
@@ -66,11 +66,9 @@
     WPA_MIXED_STRING: WPA_PSK_KEY_MGMT,
     WPA3_STRING: SAE_KEY_MGMT,
     WPA2_WPA3_MIXED_STRING: DUAL_WPA_PSK_SAE_KEY_MGMT,
-    WPA_WPA2_WPA3_MIXED_STRING: DUAL_WPA_PSK_SAE_KEY_MGMT
+    WPA_WPA2_WPA3_MIXED_STRING: DUAL_WPA_PSK_SAE_KEY_MGMT,
 }
-WPA3_MODE_STRINGS = {
-    WPA3_STRING, WPA2_WPA3_MIXED_STRING, WPA_WPA2_WPA3_MIXED_STRING
-}
+WPA3_MODE_STRINGS = {WPA3_STRING, WPA2_WPA3_MIXED_STRING, WPA_WPA2_WPA3_MIXED_STRING}
 
 SECURITY_STRING_TO_DEFAULT_TARGET_SECURITY = {
     WEP_STRING: WEP_STRING,
@@ -79,16 +77,16 @@
     WPA_MIXED_STRING: WPA2_STRING,
     WPA3_STRING: WPA3_STRING,
     WPA2_WPA3_MIXED_STRING: WPA3_STRING,
-    WPA_WPA2_WPA3_MIXED_STRING: WPA3_STRING
+    WPA_WPA2_WPA3_MIXED_STRING: WPA3_STRING,
 }
 
 IEEE8021X = 1
-WLAN0_STRING = 'wlan0'
-WLAN1_STRING = 'wlan1'
-WLAN2_STRING = 'wlan2'
-WLAN3_STRING = 'wlan3'
-WLAN0_GALE = 'wlan-2400mhz'
-WLAN1_GALE = 'wlan-5000mhz'
+WLAN0_STRING = "wlan0"
+WLAN1_STRING = "wlan1"
+WLAN2_STRING = "wlan2"
+WLAN3_STRING = "wlan3"
+WLAN0_GALE = "wlan-2400mhz"
+WLAN1_GALE = "wlan-5000mhz"
 WEP_DEFAULT_KEY = 0
 WEP_HEX_LENGTH = [10, 26, 32, 58]
 WEP_STR_LENGTH = [5, 13, 16]
@@ -171,25 +169,48 @@
     5795: 159,
     5785: 157,
     5805: 161,
-    5825: 165
+    5825: 165,
 }
 FREQUENCY_MAP = {v: k for k, v in CHANNEL_MAP.items()}
 
 US_CHANNELS_2G = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]
 US_CHANNELS_5G = [
-    36, 40, 44, 48, 52, 56, 60, 64, 100, 104, 108, 112, 116, 120, 124, 128,
-    132, 136, 140, 144, 149, 153, 157, 161, 165
+    36,
+    40,
+    44,
+    48,
+    52,
+    56,
+    60,
+    64,
+    100,
+    104,
+    108,
+    112,
+    116,
+    120,
+    124,
+    128,
+    132,
+    136,
+    140,
+    144,
+    149,
+    153,
+    157,
+    161,
+    165,
 ]
 
 LOWEST_5G_CHANNEL = 36
 
-MODE_11A = 'a'
-MODE_11B = 'b'
-MODE_11G = 'g'
-MODE_11N_MIXED = 'n-mixed'
-MODE_11N_PURE = 'n-only'
-MODE_11AC_MIXED = 'ac-mixed'
-MODE_11AC_PURE = 'ac-only'
+MODE_11A = "a"
+MODE_11B = "b"
+MODE_11G = "g"
+MODE_11N_MIXED = "n-mixed"
+MODE_11N_PURE = "n-only"
+MODE_11AC_MIXED = "ac-mixed"
+MODE_11AC_PURE = "ac-only"
 
 N_CAPABILITY_LDPC = object()
 N_CAPABILITY_HT20 = object()
@@ -210,29 +231,26 @@
 N_CAPABILITY_SMPS_STATIC = object()
 N_CAPABILITY_SMPS_DYNAMIC = object()
 N_CAPABILITIES_MAPPING = {
-    N_CAPABILITY_LDPC: '[LDPC]',
-    N_CAPABILITY_HT20: '[HT20]',
-    N_CAPABILITY_HT40_PLUS: '[HT40+]',
-    N_CAPABILITY_HT40_MINUS: '[HT40-]',
-    N_CAPABILITY_GREENFIELD: '[GF]',
-    N_CAPABILITY_SGI20: '[SHORT-GI-20]',
-    N_CAPABILITY_SGI40: '[SHORT-GI-40]',
-    N_CAPABILITY_TX_STBC: '[TX-STBC]',
-    N_CAPABILITY_RX_STBC1: '[RX-STBC1]',
-    N_CAPABILITY_RX_STBC12: '[RX-STBC12]',
-    N_CAPABILITY_RX_STBC123: '[RX-STBC123]',
-    N_CAPABILITY_DSSS_CCK_40: '[DSSS_CCK-40]',
-    N_CAPABILITY_LSIG_TXOP_PROT: '[LSIG-TXOP-PROT]',
-    N_CAPABILITY_40_INTOLERANT: '[40-INTOLERANT]',
-    N_CAPABILITY_MAX_AMSDU_7935: '[MAX-AMSDU-7935]',
-    N_CAPABILITY_DELAY_BLOCK_ACK: '[DELAYED-BA]',
-    N_CAPABILITY_SMPS_STATIC: '[SMPS-STATIC]',
-    N_CAPABILITY_SMPS_DYNAMIC: '[SMPS-DYNAMIC]'
+    N_CAPABILITY_LDPC: "[LDPC]",
+    N_CAPABILITY_HT20: "[HT20]",
+    N_CAPABILITY_HT40_PLUS: "[HT40+]",
+    N_CAPABILITY_HT40_MINUS: "[HT40-]",
+    N_CAPABILITY_GREENFIELD: "[GF]",
+    N_CAPABILITY_SGI20: "[SHORT-GI-20]",
+    N_CAPABILITY_SGI40: "[SHORT-GI-40]",
+    N_CAPABILITY_TX_STBC: "[TX-STBC]",
+    N_CAPABILITY_RX_STBC1: "[RX-STBC1]",
+    N_CAPABILITY_RX_STBC12: "[RX-STBC12]",
+    N_CAPABILITY_RX_STBC123: "[RX-STBC123]",
+    N_CAPABILITY_DSSS_CCK_40: "[DSSS_CCK-40]",
+    N_CAPABILITY_LSIG_TXOP_PROT: "[LSIG-TXOP-PROT]",
+    N_CAPABILITY_40_INTOLERANT: "[40-INTOLERANT]",
+    N_CAPABILITY_MAX_AMSDU_7935: "[MAX-AMSDU-7935]",
+    N_CAPABILITY_DELAY_BLOCK_ACK: "[DELAYED-BA]",
+    N_CAPABILITY_SMPS_STATIC: "[SMPS-STATIC]",
+    N_CAPABILITY_SMPS_DYNAMIC: "[SMPS-DYNAMIC]",
 }
-N_CAPABILITIES_MAPPING_INVERSE = {
-    v: k
-    for k, v in N_CAPABILITIES_MAPPING.items()
-}
+N_CAPABILITIES_MAPPING_INVERSE = {v: k for k, v in N_CAPABILITIES_MAPPING.items()}
 N_CAPABILITY_HT40_MINUS_CHANNELS = object()
 N_CAPABILITY_HT40_PLUS_CHANNELS = object()
 AC_CAPABILITY_VHT160 = object()
@@ -272,47 +290,44 @@
 AC_CAPABILITY_MAX_MPDU_7991 = object()
 AC_CAPABILITY_MAX_MPDU_11454 = object()
 AC_CAPABILITIES_MAPPING = {
-    AC_CAPABILITY_VHT160: '[VHT160]',
-    AC_CAPABILITY_VHT160_80PLUS80: '[VHT160-80PLUS80]',
-    AC_CAPABILITY_RXLDPC: '[RXLDPC]',
-    AC_CAPABILITY_SHORT_GI_80: '[SHORT-GI-80]',
-    AC_CAPABILITY_SHORT_GI_160: '[SHORT-GI-160]',
-    AC_CAPABILITY_TX_STBC_2BY1: '[TX-STBC-2BY1]',
-    AC_CAPABILITY_RX_STBC_1: '[RX-STBC-1]',
-    AC_CAPABILITY_RX_STBC_12: '[RX-STBC-12]',
-    AC_CAPABILITY_RX_STBC_123: '[RX-STBC-123]',
-    AC_CAPABILITY_RX_STBC_1234: '[RX-STBC-1234]',
-    AC_CAPABILITY_SU_BEAMFORMER: '[SU-BEAMFORMER]',
-    AC_CAPABILITY_SU_BEAMFORMEE: '[SU-BEAMFORMEE]',
-    AC_CAPABILITY_BF_ANTENNA_2: '[BF-ANTENNA-2]',
-    AC_CAPABILITY_BF_ANTENNA_3: '[BF-ANTENNA-3]',
-    AC_CAPABILITY_BF_ANTENNA_4: '[BF-ANTENNA-4]',
-    AC_CAPABILITY_SOUNDING_DIMENSION_2: '[SOUNDING-DIMENSION-2]',
-    AC_CAPABILITY_SOUNDING_DIMENSION_3: '[SOUNDING-DIMENSION-3]',
-    AC_CAPABILITY_SOUNDING_DIMENSION_4: '[SOUNDING-DIMENSION-4]',
-    AC_CAPABILITY_MU_BEAMFORMER: '[MU-BEAMFORMER]',
-    AC_CAPABILITY_MU_BEAMFORMEE: '[MU-BEAMFORMEE]',
-    AC_CAPABILITY_VHT_TXOP_PS: '[VHT-TXOP-PS]',
-    AC_CAPABILITY_HTC_VHT: '[HTC-VHT]',
-    AC_CAPABILITY_MAX_A_MPDU_LEN_EXP0: '[MAX-A-MPDU-LEN-EXP0]',
-    AC_CAPABILITY_MAX_A_MPDU_LEN_EXP1: '[MAX-A-MPDU-LEN-EXP1]',
-    AC_CAPABILITY_MAX_A_MPDU_LEN_EXP2: '[MAX-A-MPDU-LEN-EXP2]',
-    AC_CAPABILITY_MAX_A_MPDU_LEN_EXP3: '[MAX-A-MPDU-LEN-EXP3]',
-    AC_CAPABILITY_MAX_A_MPDU_LEN_EXP4: '[MAX-A-MPDU-LEN-EXP4]',
-    AC_CAPABILITY_MAX_A_MPDU_LEN_EXP5: '[MAX-A-MPDU-LEN-EXP5]',
-    AC_CAPABILITY_MAX_A_MPDU_LEN_EXP6: '[MAX-A-MPDU-LEN-EXP6]',
-    AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7: '[MAX-A-MPDU-LEN-EXP7]',
-    AC_CAPABILITY_VHT_LINK_ADAPT2: '[VHT-LINK-ADAPT2]',
-    AC_CAPABILITY_VHT_LINK_ADAPT3: '[VHT-LINK-ADAPT3]',
-    AC_CAPABILITY_RX_ANTENNA_PATTERN: '[RX-ANTENNA-PATTERN]',
-    AC_CAPABILITY_TX_ANTENNA_PATTERN: '[TX-ANTENNA-PATTERN]',
-    AC_CAPABILITY_MAX_MPDU_11454: '[MAX-MPDU-11454]',
-    AC_CAPABILITY_MAX_MPDU_7991: '[MAX-MPDU-7991]'
+    AC_CAPABILITY_VHT160: "[VHT160]",
+    AC_CAPABILITY_VHT160_80PLUS80: "[VHT160-80PLUS80]",
+    AC_CAPABILITY_RXLDPC: "[RXLDPC]",
+    AC_CAPABILITY_SHORT_GI_80: "[SHORT-GI-80]",
+    AC_CAPABILITY_SHORT_GI_160: "[SHORT-GI-160]",
+    AC_CAPABILITY_TX_STBC_2BY1: "[TX-STBC-2BY1]",
+    AC_CAPABILITY_RX_STBC_1: "[RX-STBC-1]",
+    AC_CAPABILITY_RX_STBC_12: "[RX-STBC-12]",
+    AC_CAPABILITY_RX_STBC_123: "[RX-STBC-123]",
+    AC_CAPABILITY_RX_STBC_1234: "[RX-STBC-1234]",
+    AC_CAPABILITY_SU_BEAMFORMER: "[SU-BEAMFORMER]",
+    AC_CAPABILITY_SU_BEAMFORMEE: "[SU-BEAMFORMEE]",
+    AC_CAPABILITY_BF_ANTENNA_2: "[BF-ANTENNA-2]",
+    AC_CAPABILITY_BF_ANTENNA_3: "[BF-ANTENNA-3]",
+    AC_CAPABILITY_BF_ANTENNA_4: "[BF-ANTENNA-4]",
+    AC_CAPABILITY_SOUNDING_DIMENSION_2: "[SOUNDING-DIMENSION-2]",
+    AC_CAPABILITY_SOUNDING_DIMENSION_3: "[SOUNDING-DIMENSION-3]",
+    AC_CAPABILITY_SOUNDING_DIMENSION_4: "[SOUNDING-DIMENSION-4]",
+    AC_CAPABILITY_MU_BEAMFORMER: "[MU-BEAMFORMER]",
+    AC_CAPABILITY_MU_BEAMFORMEE: "[MU-BEAMFORMEE]",
+    AC_CAPABILITY_VHT_TXOP_PS: "[VHT-TXOP-PS]",
+    AC_CAPABILITY_HTC_VHT: "[HTC-VHT]",
+    AC_CAPABILITY_MAX_A_MPDU_LEN_EXP0: "[MAX-A-MPDU-LEN-EXP0]",
+    AC_CAPABILITY_MAX_A_MPDU_LEN_EXP1: "[MAX-A-MPDU-LEN-EXP1]",
+    AC_CAPABILITY_MAX_A_MPDU_LEN_EXP2: "[MAX-A-MPDU-LEN-EXP2]",
+    AC_CAPABILITY_MAX_A_MPDU_LEN_EXP3: "[MAX-A-MPDU-LEN-EXP3]",
+    AC_CAPABILITY_MAX_A_MPDU_LEN_EXP4: "[MAX-A-MPDU-LEN-EXP4]",
+    AC_CAPABILITY_MAX_A_MPDU_LEN_EXP5: "[MAX-A-MPDU-LEN-EXP5]",
+    AC_CAPABILITY_MAX_A_MPDU_LEN_EXP6: "[MAX-A-MPDU-LEN-EXP6]",
+    AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7: "[MAX-A-MPDU-LEN-EXP7]",
+    AC_CAPABILITY_VHT_LINK_ADAPT2: "[VHT-LINK-ADAPT2]",
+    AC_CAPABILITY_VHT_LINK_ADAPT3: "[VHT-LINK-ADAPT3]",
+    AC_CAPABILITY_RX_ANTENNA_PATTERN: "[RX-ANTENNA-PATTERN]",
+    AC_CAPABILITY_TX_ANTENNA_PATTERN: "[TX-ANTENNA-PATTERN]",
+    AC_CAPABILITY_MAX_MPDU_11454: "[MAX-MPDU-11454]",
+    AC_CAPABILITY_MAX_MPDU_7991: "[MAX-MPDU-7991]",
 }
-AC_CAPABILITIES_MAPPING_INVERSE = {
-    v: k
-    for k, v in AC_CAPABILITIES_MAPPING.items()
-}
+AC_CAPABILITIES_MAPPING_INVERSE = {v: k for k, v in AC_CAPABILITIES_MAPPING.items()}
 VHT_CHANNEL_WIDTH_40 = 0
 VHT_CHANNEL_WIDTH_80 = 1
 VHT_CHANNEL_WIDTH_160 = 2
@@ -321,7 +336,7 @@
 VHT_CHANNEL = {
     40: VHT_CHANNEL_WIDTH_40,
     80: VHT_CHANNEL_WIDTH_80,
-    160: VHT_CHANNEL_WIDTH_160
+    160: VHT_CHANNEL_WIDTH_160,
 }
 
 # This is a loose merging of the rules for US and EU regulatory
@@ -330,1026 +345,499 @@
 # tolerate HT40+ on channel 7 (not allowed in the US).  We take the loose
 # definition so that we don't prohibit testing in either domain.
 HT40_ALLOW_MAP = {
-    N_CAPABILITY_HT40_MINUS_CHANNELS:
-    tuple(
-        itertools.chain(range(6, 14), range(40, 65, 8), range(104, 145, 8),
-                        [153, 161])),
-    N_CAPABILITY_HT40_PLUS_CHANNELS:
-    tuple(
-        itertools.chain(range(1, 8), range(36, 61, 8), range(100, 141, 8),
-                        [149, 157]))
+    N_CAPABILITY_HT40_MINUS_CHANNELS: tuple(
+        itertools.chain(range(6, 14), range(40, 65, 8), range(104, 145, 8), [153, 161])
+    ),
+    N_CAPABILITY_HT40_PLUS_CHANNELS: tuple(
+        itertools.chain(range(1, 8), range(36, 61, 8), range(100, 141, 8), [149, 157])
+    ),
 }
 
 PMF_SUPPORT_DISABLED = 0
 PMF_SUPPORT_ENABLED = 1
 PMF_SUPPORT_REQUIRED = 2
-PMF_SUPPORT_VALUES = (PMF_SUPPORT_DISABLED, PMF_SUPPORT_ENABLED,
-                      PMF_SUPPORT_REQUIRED)
+PMF_SUPPORT_VALUES = (PMF_SUPPORT_DISABLED, PMF_SUPPORT_ENABLED, PMF_SUPPORT_REQUIRED)
 
-DRIVER_NAME = 'nl80211'
+DRIVER_NAME = "nl80211"
 
 CENTER_CHANNEL_MAP = {
     VHT_CHANNEL_WIDTH_40: {
-        'delta':
-        2,
-        'channels': ((36, 40), (44, 48), (52, 56), (60, 64), (100, 104),
-                     (108, 112), (116, 120), (124, 128), (132, 136),
-                     (140, 144), (149, 153), (157, 161))
+        "delta": 2,
+        "channels": (
+            (36, 40),
+            (44, 48),
+            (52, 56),
+            (60, 64),
+            (100, 104),
+            (108, 112),
+            (116, 120),
+            (124, 128),
+            (132, 136),
+            (140, 144),
+            (149, 153),
+            (157, 161),
+        ),
     },
     VHT_CHANNEL_WIDTH_80: {
-        'delta':
-        6,
-        'channels':
-        ((36, 48), (52, 64), (100, 112), (116, 128), (132, 144), (149, 161))
+        "delta": 6,
+        "channels": (
+            (36, 48),
+            (52, 64),
+            (100, 112),
+            (116, 128),
+            (132, 144),
+            (149, 161),
+        ),
     },
-    VHT_CHANNEL_WIDTH_160: {
-        'delta': 14,
-        'channels': ((36, 64), (100, 128))
-    }
+    VHT_CHANNEL_WIDTH_160: {"delta": 14, "channels": ((36, 64), (100, 128))},
 }
 
-OFDM_DATA_RATES = {'supported_rates': '60 90 120 180 240 360 480 540'}
+OFDM_DATA_RATES = {"supported_rates": "60 90 120 180 240 360 480 540"}
 
-CCK_DATA_RATES = {'supported_rates': '10 20 55 110'}
+CCK_DATA_RATES = {"supported_rates": "10 20 55 110"}
 
 CCK_AND_OFDM_DATA_RATES = {
-    'supported_rates': '10 20 55 110 60 90 120 180 240 360 480 540'
+    "supported_rates": "10 20 55 110 60 90 120 180 240 360 480 540"
 }
 
-OFDM_ONLY_BASIC_RATES = {'basic_rates': '60 120 240'}
+OFDM_ONLY_BASIC_RATES = {"basic_rates": "60 120 240"}
 
-CCK_AND_OFDM_BASIC_RATES = {'basic_rates': '10 20 55 110'}
+CCK_AND_OFDM_BASIC_RATES = {"basic_rates": "10 20 55 110"}
 
 WEP_AUTH = {
-    'open': {
-        'auth_algs': 1
-    },
-    'shared': {
-        'auth_algs': 2
-    },
-    'open_and_shared': {
-        'auth_algs': 3
-    }
+    "open": {"auth_algs": 1},
+    "shared": {"auth_algs": 2},
+    "open_and_shared": {"auth_algs": 3},
 }
 
 WMM_11B_DEFAULT_PARAMS = {
-    'wmm_ac_bk_cwmin': 5,
-    'wmm_ac_bk_cwmax': 10,
-    'wmm_ac_bk_aifs': 7,
-    'wmm_ac_bk_txop_limit': 0,
-    'wmm_ac_be_aifs': 3,
-    'wmm_ac_be_cwmin': 5,
-    'wmm_ac_be_cwmax': 7,
-    'wmm_ac_be_txop_limit': 0,
-    'wmm_ac_vi_aifs': 2,
-    'wmm_ac_vi_cwmin': 4,
-    'wmm_ac_vi_cwmax': 5,
-    'wmm_ac_vi_txop_limit': 188,
-    'wmm_ac_vo_aifs': 2,
-    'wmm_ac_vo_cwmin': 3,
-    'wmm_ac_vo_cwmax': 4,
-    'wmm_ac_vo_txop_limit': 102
+    "wmm_ac_bk_cwmin": 5,
+    "wmm_ac_bk_cwmax": 10,
+    "wmm_ac_bk_aifs": 7,
+    "wmm_ac_bk_txop_limit": 0,
+    "wmm_ac_be_aifs": 3,
+    "wmm_ac_be_cwmin": 5,
+    "wmm_ac_be_cwmax": 7,
+    "wmm_ac_be_txop_limit": 0,
+    "wmm_ac_vi_aifs": 2,
+    "wmm_ac_vi_cwmin": 4,
+    "wmm_ac_vi_cwmax": 5,
+    "wmm_ac_vi_txop_limit": 188,
+    "wmm_ac_vo_aifs": 2,
+    "wmm_ac_vo_cwmin": 3,
+    "wmm_ac_vo_cwmax": 4,
+    "wmm_ac_vo_txop_limit": 102,
 }
 
 WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS = {
-    'wmm_ac_bk_cwmin': 4,
-    'wmm_ac_bk_cwmax': 10,
-    'wmm_ac_bk_aifs': 7,
-    'wmm_ac_bk_txop_limit': 0,
-    'wmm_ac_be_aifs': 3,
-    'wmm_ac_be_cwmin': 4,
-    'wmm_ac_be_cwmax': 10,
-    'wmm_ac_be_txop_limit': 0,
-    'wmm_ac_vi_aifs': 2,
-    'wmm_ac_vi_cwmin': 3,
-    'wmm_ac_vi_cwmax': 4,
-    'wmm_ac_vi_txop_limit': 94,
-    'wmm_ac_vo_aifs': 2,
-    'wmm_ac_vo_cwmin': 2,
-    'wmm_ac_vo_cwmax': 3,
-    'wmm_ac_vo_txop_limit': 47
+    "wmm_ac_bk_cwmin": 4,
+    "wmm_ac_bk_cwmax": 10,
+    "wmm_ac_bk_aifs": 7,
+    "wmm_ac_bk_txop_limit": 0,
+    "wmm_ac_be_aifs": 3,
+    "wmm_ac_be_cwmin": 4,
+    "wmm_ac_be_cwmax": 10,
+    "wmm_ac_be_txop_limit": 0,
+    "wmm_ac_vi_aifs": 2,
+    "wmm_ac_vi_cwmin": 3,
+    "wmm_ac_vi_cwmax": 4,
+    "wmm_ac_vi_txop_limit": 94,
+    "wmm_ac_vo_aifs": 2,
+    "wmm_ac_vo_cwmin": 2,
+    "wmm_ac_vo_cwmax": 3,
+    "wmm_ac_vo_txop_limit": 47,
 }
 
 WMM_NON_DEFAULT_PARAMS = {
-    'wmm_ac_bk_cwmin': 5,
-    'wmm_ac_bk_cwmax': 9,
-    'wmm_ac_bk_aifs': 3,
-    'wmm_ac_bk_txop_limit': 94,
-    'wmm_ac_be_aifs': 2,
-    'wmm_ac_be_cwmin': 2,
-    'wmm_ac_be_cwmax': 8,
-    'wmm_ac_be_txop_limit': 0,
-    'wmm_ac_vi_aifs': 1,
-    'wmm_ac_vi_cwmin': 7,
-    'wmm_ac_vi_cwmax': 10,
-    'wmm_ac_vi_txop_limit': 47,
-    'wmm_ac_vo_aifs': 1,
-    'wmm_ac_vo_cwmin': 6,
-    'wmm_ac_vo_cwmax': 10,
-    'wmm_ac_vo_txop_limit': 94
+    "wmm_ac_bk_cwmin": 5,
+    "wmm_ac_bk_cwmax": 9,
+    "wmm_ac_bk_aifs": 3,
+    "wmm_ac_bk_txop_limit": 94,
+    "wmm_ac_be_aifs": 2,
+    "wmm_ac_be_cwmin": 2,
+    "wmm_ac_be_cwmax": 8,
+    "wmm_ac_be_txop_limit": 0,
+    "wmm_ac_vi_aifs": 1,
+    "wmm_ac_vi_cwmin": 7,
+    "wmm_ac_vi_cwmax": 10,
+    "wmm_ac_vi_txop_limit": 47,
+    "wmm_ac_vo_aifs": 1,
+    "wmm_ac_vo_cwmin": 6,
+    "wmm_ac_vo_cwmax": 10,
+    "wmm_ac_vo_txop_limit": 94,
 }
 
 WMM_DEGRADED_VO_PARAMS = {
-    'wmm_ac_bk_cwmin': 7,
-    'wmm_ac_bk_cwmax': 15,
-    'wmm_ac_bk_aifs': 2,
-    'wmm_ac_bk_txop_limit': 0,
-    'wmm_ac_be_aifs': 2,
-    'wmm_ac_be_cwmin': 7,
-    'wmm_ac_be_cwmax': 15,
-    'wmm_ac_be_txop_limit': 0,
-    'wmm_ac_vi_aifs': 2,
-    'wmm_ac_vi_cwmin': 7,
-    'wmm_ac_vi_cwmax': 15,
-    'wmm_ac_vi_txop_limit': 94,
-    'wmm_ac_vo_aifs': 10,
-    'wmm_ac_vo_cwmin': 7,
-    'wmm_ac_vo_cwmax': 15,
-    'wmm_ac_vo_txop_limit': 47
+    "wmm_ac_bk_cwmin": 7,
+    "wmm_ac_bk_cwmax": 15,
+    "wmm_ac_bk_aifs": 2,
+    "wmm_ac_bk_txop_limit": 0,
+    "wmm_ac_be_aifs": 2,
+    "wmm_ac_be_cwmin": 7,
+    "wmm_ac_be_cwmax": 15,
+    "wmm_ac_be_txop_limit": 0,
+    "wmm_ac_vi_aifs": 2,
+    "wmm_ac_vi_cwmin": 7,
+    "wmm_ac_vi_cwmax": 15,
+    "wmm_ac_vi_txop_limit": 94,
+    "wmm_ac_vo_aifs": 10,
+    "wmm_ac_vo_cwmin": 7,
+    "wmm_ac_vo_cwmax": 15,
+    "wmm_ac_vo_txop_limit": 47,
 }
 
 WMM_DEGRADED_VI_PARAMS = {
-    'wmm_ac_bk_cwmin': 7,
-    'wmm_ac_bk_cwmax': 15,
-    'wmm_ac_bk_aifs': 2,
-    'wmm_ac_bk_txop_limit': 0,
-    'wmm_ac_be_aifs': 2,
-    'wmm_ac_be_cwmin': 7,
-    'wmm_ac_be_cwmax': 15,
-    'wmm_ac_be_txop_limit': 0,
-    'wmm_ac_vi_aifs': 10,
-    'wmm_ac_vi_cwmin': 7,
-    'wmm_ac_vi_cwmax': 15,
-    'wmm_ac_vi_txop_limit': 94,
-    'wmm_ac_vo_aifs': 2,
-    'wmm_ac_vo_cwmin': 7,
-    'wmm_ac_vo_cwmax': 15,
-    'wmm_ac_vo_txop_limit': 47
+    "wmm_ac_bk_cwmin": 7,
+    "wmm_ac_bk_cwmax": 15,
+    "wmm_ac_bk_aifs": 2,
+    "wmm_ac_bk_txop_limit": 0,
+    "wmm_ac_be_aifs": 2,
+    "wmm_ac_be_cwmin": 7,
+    "wmm_ac_be_cwmax": 15,
+    "wmm_ac_be_txop_limit": 0,
+    "wmm_ac_vi_aifs": 10,
+    "wmm_ac_vi_cwmin": 7,
+    "wmm_ac_vi_cwmax": 15,
+    "wmm_ac_vi_txop_limit": 94,
+    "wmm_ac_vo_aifs": 2,
+    "wmm_ac_vo_cwmin": 7,
+    "wmm_ac_vo_cwmax": 15,
+    "wmm_ac_vo_txop_limit": 47,
 }
 
 WMM_IMPROVE_BE_PARAMS = {
-    'wmm_ac_bk_cwmin': 7,
-    'wmm_ac_bk_cwmax': 15,
-    'wmm_ac_bk_aifs': 10,
-    'wmm_ac_bk_txop_limit': 0,
-    'wmm_ac_be_aifs': 2,
-    'wmm_ac_be_cwmin': 7,
-    'wmm_ac_be_cwmax': 15,
-    'wmm_ac_be_txop_limit': 0,
-    'wmm_ac_vi_aifs': 10,
-    'wmm_ac_vi_cwmin': 7,
-    'wmm_ac_vi_cwmax': 15,
-    'wmm_ac_vi_txop_limit': 94,
-    'wmm_ac_vo_aifs': 10,
-    'wmm_ac_vo_cwmin': 7,
-    'wmm_ac_vo_cwmax': 15,
-    'wmm_ac_vo_txop_limit': 47
+    "wmm_ac_bk_cwmin": 7,
+    "wmm_ac_bk_cwmax": 15,
+    "wmm_ac_bk_aifs": 10,
+    "wmm_ac_bk_txop_limit": 0,
+    "wmm_ac_be_aifs": 2,
+    "wmm_ac_be_cwmin": 7,
+    "wmm_ac_be_cwmax": 15,
+    "wmm_ac_be_txop_limit": 0,
+    "wmm_ac_vi_aifs": 10,
+    "wmm_ac_vi_cwmin": 7,
+    "wmm_ac_vi_cwmax": 15,
+    "wmm_ac_vi_txop_limit": 94,
+    "wmm_ac_vo_aifs": 10,
+    "wmm_ac_vo_cwmin": 7,
+    "wmm_ac_vo_cwmax": 15,
+    "wmm_ac_vo_txop_limit": 47,
 }
 
 WMM_IMPROVE_BK_PARAMS = {
-    'wmm_ac_bk_cwmin': 7,
-    'wmm_ac_bk_cwmax': 15,
-    'wmm_ac_bk_aifs': 2,
-    'wmm_ac_bk_txop_limit': 0,
-    'wmm_ac_be_aifs': 10,
-    'wmm_ac_be_cwmin': 7,
-    'wmm_ac_be_cwmax': 15,
-    'wmm_ac_be_txop_limit': 0,
-    'wmm_ac_vi_aifs': 10,
-    'wmm_ac_vi_cwmin': 7,
-    'wmm_ac_vi_cwmax': 15,
-    'wmm_ac_vi_txop_limit': 94,
-    'wmm_ac_vo_aifs': 10,
-    'wmm_ac_vo_cwmin': 7,
-    'wmm_ac_vo_cwmax': 15,
-    'wmm_ac_vo_txop_limit': 47
+    "wmm_ac_bk_cwmin": 7,
+    "wmm_ac_bk_cwmax": 15,
+    "wmm_ac_bk_aifs": 2,
+    "wmm_ac_bk_txop_limit": 0,
+    "wmm_ac_be_aifs": 10,
+    "wmm_ac_be_cwmin": 7,
+    "wmm_ac_be_cwmax": 15,
+    "wmm_ac_be_txop_limit": 0,
+    "wmm_ac_vi_aifs": 10,
+    "wmm_ac_vi_cwmin": 7,
+    "wmm_ac_vi_cwmax": 15,
+    "wmm_ac_vi_txop_limit": 94,
+    "wmm_ac_vo_aifs": 10,
+    "wmm_ac_vo_cwmin": 7,
+    "wmm_ac_vo_cwmax": 15,
+    "wmm_ac_vo_txop_limit": 47,
 }
 
-WMM_ACM_BK = {'wmm_ac_bk_acm': 1}
-WMM_ACM_BE = {'wmm_ac_be_acm': 1}
-WMM_ACM_VI = {'wmm_ac_vi_acm': 1}
-WMM_ACM_VO = {'wmm_ac_vo_acm': 1}
+WMM_ACM_BK = {"wmm_ac_bk_acm": 1}
+WMM_ACM_BE = {"wmm_ac_be_acm": 1}
+WMM_ACM_VI = {"wmm_ac_vi_acm": 1}
+WMM_ACM_VO = {"wmm_ac_vo_acm": 1}
 
-UAPSD_ENABLED = {'uapsd_advertisement_enabled': 1}
+UAPSD_ENABLED = {"uapsd_advertisement_enabled": 1}
 
-UTF_8_SSID = {'utf8_ssid': 1}
+UTF_8_SSID = {"utf8_ssid": 1}
 
-ENABLE_RRM_BEACON_REPORT = {'rrm_beacon_report': 1}
-ENABLE_RRM_NEIGHBOR_REPORT = {'rrm_neighbor_report': 1}
+ENABLE_RRM_BEACON_REPORT = {"rrm_beacon_report": 1}
+ENABLE_RRM_NEIGHBOR_REPORT = {"rrm_neighbor_report": 1}
 
 # Wireless Network Management (AKA 802.11v) features.
-ENABLE_WNM_TIME_ADVERTISEMENT = {'time_advertisement': 2, 'time_zone': 'EST5'}
-ENABLE_WNM_SLEEP_MODE = {'wnm_sleep_mode': 1}
-ENABLE_WNM_BSS_TRANSITION_MANAGEMENT = {'bss_transition': 1}
-ENABLE_WNM_PROXY_ARP = {'proxy_arp': 1}
-ENABLE_WNM_IPV6_NEIGHBOR_ADVERTISEMENT_MULTICAST_TO_UNICAST = {
-    'na_mcast_to_ucast': 1
-}
+ENABLE_WNM_TIME_ADVERTISEMENT = {"time_advertisement": 2, "time_zone": "EST5"}
+ENABLE_WNM_SLEEP_MODE = {"wnm_sleep_mode": 1}
+ENABLE_WNM_BSS_TRANSITION_MANAGEMENT = {"bss_transition": 1}
+ENABLE_WNM_PROXY_ARP = {"proxy_arp": 1}
+ENABLE_WNM_IPV6_NEIGHBOR_ADVERTISEMENT_MULTICAST_TO_UNICAST = {"na_mcast_to_ucast": 1}
 
 VENDOR_IE = {
-    'correct_length_beacon': {
-        'vendor_elements': 'dd0411223301'
+    "correct_length_beacon": {"vendor_elements": "dd0411223301"},
+    "too_short_length_beacon": {"vendor_elements": "dd0311223301"},
+    "too_long_length_beacon": {"vendor_elements": "dd0511223301"},
+    "zero_length_beacon_with_data": {"vendor_elements": "dd0011223301"},
+    "zero_length_beacon_without_data": {"vendor_elements": "dd00"},
+    "simliar_to_wpa": {"vendor_elements": "dd040050f203"},
+    "correct_length_association_response": {"assocresp_elements": "dd0411223301"},
+    "too_short_length_association_response": {"assocresp_elements": "dd0311223301"},
+    "too_long_length_association_response": {"assocresp_elements": "dd0511223301"},
+    "zero_length_association_response_with_data": {
+        "assocresp_elements": "dd0011223301"
     },
-    'too_short_length_beacon': {
-        'vendor_elements': 'dd0311223301'
-    },
-    'too_long_length_beacon': {
-        'vendor_elements': 'dd0511223301'
-    },
-    'zero_length_beacon_with_data': {
-        'vendor_elements': 'dd0011223301'
-    },
-    'zero_length_beacon_without_data': {
-        'vendor_elements': 'dd00'
-    },
-    'simliar_to_wpa': {
-        'vendor_elements': 'dd040050f203'
-    },
-    'correct_length_association_response': {
-        'assocresp_elements': 'dd0411223301'
-    },
-    'too_short_length_association_response': {
-        'assocresp_elements': 'dd0311223301'
-    },
-    'too_long_length_association_response': {
-        'assocresp_elements': 'dd0511223301'
-    },
-    'zero_length_association_response_with_data': {
-        'assocresp_elements': 'dd0011223301'
-    },
-    'zero_length_association_response_without_data': {
-        'assocresp_elements': 'dd00'
-    }
+    "zero_length_association_response_without_data": {"assocresp_elements": "dd00"},
 }
 
-ENABLE_IEEE80211D = {'ieee80211d': 1}
+ENABLE_IEEE80211D = {"ieee80211d": 1}
 
 COUNTRY_STRING = {
-    'ALL': {
-        'country3': '0x20'
-    },
-    'OUTDOOR': {
-        'country3': '0x4f'
-    },
-    'INDOOR': {
-        'country3': '0x49'
-    },
-    'NONCOUNTRY': {
-        'country3': '0x58'
-    },
-    'GLOBAL': {
-        'country3': '0x04'
-    }
+    "ALL": {"country3": "0x20"},
+    "OUTDOOR": {"country3": "0x4f"},
+    "INDOOR": {"country3": "0x49"},
+    "NONCOUNTRY": {"country3": "0x58"},
+    "GLOBAL": {"country3": "0x04"},
 }
 
 COUNTRY_CODE = {
-    'AFGHANISTAN': {
-        'country_code': 'AF'
-    },
-    'ALAND_ISLANDS': {
-        'country_code': 'AX'
-    },
-    'ALBANIA': {
-        'country_code': 'AL'
-    },
-    'ALGERIA': {
-        'country_code': 'DZ'
-    },
-    'AMERICAN_SAMOA': {
-        'country_code': 'AS'
-    },
-    'ANDORRA': {
-        'country_code': 'AD'
-    },
-    'ANGOLA': {
-        'country_code': 'AO'
-    },
-    'ANGUILLA': {
-        'country_code': 'AI'
-    },
-    'ANTARCTICA': {
-        'country_code': 'AQ'
-    },
-    'ANTIGUA_AND_BARBUDA': {
-        'country_code': 'AG'
-    },
-    'ARGENTINA': {
-        'country_code': 'AR'
-    },
-    'ARMENIA': {
-        'country_code': 'AM'
-    },
-    'ARUBA': {
-        'country_code': 'AW'
-    },
-    'AUSTRALIA': {
-        'country_code': 'AU'
-    },
-    'AUSTRIA': {
-        'country_code': 'AT'
-    },
-    'AZERBAIJAN': {
-        'country_code': 'AZ'
-    },
-    'BAHAMAS': {
-        'country_code': 'BS'
-    },
-    'BAHRAIN': {
-        'country_code': 'BH'
-    },
-    'BANGLADESH': {
-        'country_code': 'BD'
-    },
-    'BARBADOS': {
-        'country_code': 'BB'
-    },
-    'BELARUS': {
-        'country_code': 'BY'
-    },
-    'BELGIUM': {
-        'country_code': 'BE'
-    },
-    'BELIZE': {
-        'country_code': 'BZ'
-    },
-    'BENIN': {
-        'country_code': 'BJ'
-    },
-    'BERMUDA': {
-        'country_code': 'BM'
-    },
-    'BHUTAN': {
-        'country_code': 'BT'
-    },
-    'BOLIVIA': {
-        'country_code': 'BO'
-    },
-    'BONAIRE': {
-        'country_code': 'BQ'
-    },
-    'BOSNIA_AND_HERZEGOVINA': {
-        'country_code': 'BA'
-    },
-    'BOTSWANA': {
-        'country_code': 'BW'
-    },
-    'BOUVET_ISLAND': {
-        'country_code': 'BV'
-    },
-    'BRAZIL': {
-        'country_code': 'BR'
-    },
-    'BRITISH_INDIAN_OCEAN_TERRITORY': {
-        'country_code': 'IO'
-    },
-    'BRUNEI_DARUSSALAM': {
-        'country_code': 'BN'
-    },
-    'BULGARIA': {
-        'country_code': 'BG'
-    },
-    'BURKINA_FASO': {
-        'country_code': 'BF'
-    },
-    'BURUNDI': {
-        'country_code': 'BI'
-    },
-    'CAMBODIA': {
-        'country_code': 'KH'
-    },
-    'CAMEROON': {
-        'country_code': 'CM'
-    },
-    'CANADA': {
-        'country_code': 'CA'
-    },
-    'CAPE_VERDE': {
-        'country_code': 'CV'
-    },
-    'CAYMAN_ISLANDS': {
-        'country_code': 'KY'
-    },
-    'CENTRAL_AFRICAN_REPUBLIC': {
-        'country_code': 'CF'
-    },
-    'CHAD': {
-        'country_code': 'TD'
-    },
-    'CHILE': {
-        'country_code': 'CL'
-    },
-    'CHINA': {
-        'country_code': 'CN'
-    },
-    'CHRISTMAS_ISLAND': {
-        'country_code': 'CX'
-    },
-    'COCOS_ISLANDS': {
-        'country_code': 'CC'
-    },
-    'COLOMBIA': {
-        'country_code': 'CO'
-    },
-    'COMOROS': {
-        'country_code': 'KM'
-    },
-    'CONGO': {
-        'country_code': 'CG'
-    },
-    'DEMOCRATIC_REPUBLIC_CONGO': {
-        'country_code': 'CD'
-    },
-    'COOK_ISLANDS': {
-        'country_code': 'CK'
-    },
-    'COSTA_RICA': {
-        'country_code': 'CR'
-    },
-    'COTE_D_IVOIRE': {
-        'country_code': 'CI'
-    },
-    'CROATIA': {
-        'country_code': 'HR'
-    },
-    'CUBA': {
-        'country_code': 'CU'
-    },
-    'CURACAO': {
-        'country_code': 'CW'
-    },
-    'CYPRUS': {
-        'country_code': 'CY'
-    },
-    'CZECH_REPUBLIC': {
-        'country_code': 'CZ'
-    },
-    'DENMARK': {
-        'country_code': 'DK'
-    },
-    'DJIBOUTI': {
-        'country_code': 'DJ'
-    },
-    'DOMINICA': {
-        'country_code': 'DM'
-    },
-    'DOMINICAN_REPUBLIC': {
-        'country_code': 'DO'
-    },
-    'ECUADOR': {
-        'country_code': 'EC'
-    },
-    'EGYPT': {
-        'country_code': 'EG'
-    },
-    'EL_SALVADOR': {
-        'country_code': 'SV'
-    },
-    'EQUATORIAL_GUINEA': {
-        'country_code': 'GQ'
-    },
-    'ERITREA': {
-        'country_code': 'ER'
-    },
-    'ESTONIA': {
-        'country_code': 'EE'
-    },
-    'ETHIOPIA': {
-        'country_code': 'ET'
-    },
-    'FALKLAND_ISLANDS_(MALVINAS)': {
-        'country_code': 'FK'
-    },
-    'FAROE_ISLANDS': {
-        'country_code': 'FO'
-    },
-    'FIJI': {
-        'country_code': 'FJ'
-    },
-    'FINLAND': {
-        'country_code': 'FI'
-    },
-    'FRANCE': {
-        'country_code': 'FR'
-    },
-    'FRENCH_GUIANA': {
-        'country_code': 'GF'
-    },
-    'FRENCH_POLYNESIA': {
-        'country_code': 'PF'
-    },
-    'FRENCH_SOUTHERN_TERRITORIES': {
-        'country_code': 'TF'
-    },
-    'GABON': {
-        'country_code': 'GA'
-    },
-    'GAMBIA': {
-        'country_code': 'GM'
-    },
-    'GEORGIA': {
-        'country_code': 'GE'
-    },
-    'GERMANY': {
-        'country_code': 'DE'
-    },
-    'GHANA': {
-        'country_code': 'GH'
-    },
-    'GIBRALTAR': {
-        'country_code': 'GI'
-    },
-    'GREECE': {
-        'country_code': 'GR'
-    },
-    'GREENLAND': {
-        'country_code': 'GL'
-    },
-    'GRENADA': {
-        'country_code': 'GD'
-    },
-    'GUADELOUPE': {
-        'country_code': 'GP'
-    },
-    'GUAM': {
-        'country_code': 'GU'
-    },
-    'GUATEMALA': {
-        'country_code': 'GT'
-    },
-    'GUERNSEY': {
-        'country_code': 'GG'
-    },
-    'GUINEA': {
-        'country_code': 'GN'
-    },
-    'GUINEA-BISSAU': {
-        'country_code': 'GW'
-    },
-    'GUYANA': {
-        'country_code': 'GY'
-    },
-    'HAITI': {
-        'country_code': 'HT'
-    },
-    'HEARD_ISLAND_AND_MCDONALD_ISLANDS': {
-        'country_code': 'HM'
-    },
-    'VATICAN_CITY_STATE': {
-        'country_code': 'VA'
-    },
-    'HONDURAS': {
-        'country_code': 'HN'
-    },
-    'HONG_KONG': {
-        'country_code': 'HK'
-    },
-    'HUNGARY': {
-        'country_code': 'HU'
-    },
-    'ICELAND': {
-        'country_code': 'IS'
-    },
-    'INDIA': {
-        'country_code': 'IN'
-    },
-    'INDONESIA': {
-        'country_code': 'ID'
-    },
-    'IRAN': {
-        'country_code': 'IR'
-    },
-    'IRAQ': {
-        'country_code': 'IQ'
-    },
-    'IRELAND': {
-        'country_code': 'IE'
-    },
-    'ISLE_OF_MAN': {
-        'country_code': 'IM'
-    },
-    'ISRAEL': {
-        'country_code': 'IL'
-    },
-    'ITALY': {
-        'country_code': 'IT'
-    },
-    'JAMAICA': {
-        'country_code': 'JM'
-    },
-    'JAPAN': {
-        'country_code': 'JP'
-    },
-    'JERSEY': {
-        'country_code': 'JE'
-    },
-    'JORDAN': {
-        'country_code': 'JO'
-    },
-    'KAZAKHSTAN': {
-        'country_code': 'KZ'
-    },
-    'KENYA': {
-        'country_code': 'KE'
-    },
-    'KIRIBATI': {
-        'country_code': 'KI'
-    },
-    'DEMOCRATIC_PEOPLE_S_REPUBLIC_OF_KOREA': {
-        'country_code': 'KP'
-    },
-    'REPUBLIC_OF_KOREA': {
-        'country_code': 'KR'
-    },
-    'KUWAIT': {
-        'country_code': 'KW'
-    },
-    'KYRGYZSTAN': {
-        'country_code': 'KG'
-    },
-    'LAO': {
-        'country_code': 'LA'
-    },
-    'LATVIA': {
-        'country_code': 'LV'
-    },
-    'LEBANON': {
-        'country_code': 'LB'
-    },
-    'LESOTHO': {
-        'country_code': 'LS'
-    },
-    'LIBERIA': {
-        'country_code': 'LR'
-    },
-    'LIBYA': {
-        'country_code': 'LY'
-    },
-    'LIECHTENSTEIN': {
-        'country_code': 'LI'
-    },
-    'LITHUANIA': {
-        'country_code': 'LT'
-    },
-    'LUXEMBOURG': {
-        'country_code': 'LU'
-    },
-    'MACAO': {
-        'country_code': 'MO'
-    },
-    'MACEDONIA': {
-        'country_code': 'MK'
-    },
-    'MADAGASCAR': {
-        'country_code': 'MG'
-    },
-    'MALAWI': {
-        'country_code': 'MW'
-    },
-    'MALAYSIA': {
-        'country_code': 'MY'
-    },
-    'MALDIVES': {
-        'country_code': 'MV'
-    },
-    'MALI': {
-        'country_code': 'ML'
-    },
-    'MALTA': {
-        'country_code': 'MT'
-    },
-    'MARSHALL_ISLANDS': {
-        'country_code': 'MH'
-    },
-    'MARTINIQUE': {
-        'country_code': 'MQ'
-    },
-    'MAURITANIA': {
-        'country_code': 'MR'
-    },
-    'MAURITIUS': {
-        'country_code': 'MU'
-    },
-    'MAYOTTE': {
-        'country_code': 'YT'
-    },
-    'MEXICO': {
-        'country_code': 'MX'
-    },
-    'MICRONESIA': {
-        'country_code': 'FM'
-    },
-    'MOLDOVA': {
-        'country_code': 'MD'
-    },
-    'MONACO': {
-        'country_code': 'MC'
-    },
-    'MONGOLIA': {
-        'country_code': 'MN'
-    },
-    'MONTENEGRO': {
-        'country_code': 'ME'
-    },
-    'MONTSERRAT': {
-        'country_code': 'MS'
-    },
-    'MOROCCO': {
-        'country_code': 'MA'
-    },
-    'MOZAMBIQUE': {
-        'country_code': 'MZ'
-    },
-    'MYANMAR': {
-        'country_code': 'MM'
-    },
-    'NAMIBIA': {
-        'country_code': 'NA'
-    },
-    'NAURU': {
-        'country_code': 'NR'
-    },
-    'NEPAL': {
-        'country_code': 'NP'
-    },
-    'NETHERLANDS': {
-        'country_code': 'NL'
-    },
-    'NEW_CALEDONIA': {
-        'country_code': 'NC'
-    },
-    'NEW_ZEALAND': {
-        'country_code': 'NZ'
-    },
-    'NICARAGUA': {
-        'country_code': 'NI'
-    },
-    'NIGER': {
-        'country_code': 'NE'
-    },
-    'NIGERIA': {
-        'country_code': 'NG'
-    },
-    'NIUE': {
-        'country_code': 'NU'
-    },
-    'NORFOLK_ISLAND': {
-        'country_code': 'NF'
-    },
-    'NORTHERN_MARIANA_ISLANDS': {
-        'country_code': 'MP'
-    },
-    'NORWAY': {
-        'country_code': 'NO'
-    },
-    'OMAN': {
-        'country_code': 'OM'
-    },
-    'PAKISTAN': {
-        'country_code': 'PK'
-    },
-    'PALAU': {
-        'country_code': 'PW'
-    },
-    'PALESTINE': {
-        'country_code': 'PS'
-    },
-    'PANAMA': {
-        'country_code': 'PA'
-    },
-    'PAPUA_NEW_GUINEA': {
-        'country_code': 'PG'
-    },
-    'PARAGUAY': {
-        'country_code': 'PY'
-    },
-    'PERU': {
-        'country_code': 'PE'
-    },
-    'PHILIPPINES': {
-        'country_code': 'PH'
-    },
-    'PITCAIRN': {
-        'country_code': 'PN'
-    },
-    'POLAND': {
-        'country_code': 'PL'
-    },
-    'PORTUGAL': {
-        'country_code': 'PT'
-    },
-    'PUERTO_RICO': {
-        'country_code': 'PR'
-    },
-    'QATAR': {
-        'country_code': 'QA'
-    },
-    'RÉUNION': {
-        'country_code': 'RE'
-    },
-    'ROMANIA': {
-        'country_code': 'RO'
-    },
-    'RUSSIAN_FEDERATION': {
-        'country_code': 'RU'
-    },
-    'RWANDA': {
-        'country_code': 'RW'
-    },
-    'SAINT_BARTHELEMY': {
-        'country_code': 'BL'
-    },
-    'SAINT_KITTS_AND_NEVIS': {
-        'country_code': 'KN'
-    },
-    'SAINT_LUCIA': {
-        'country_code': 'LC'
-    },
-    'SAINT_MARTIN': {
-        'country_code': 'MF'
-    },
-    'SAINT_PIERRE_AND_MIQUELON': {
-        'country_code': 'PM'
-    },
-    'SAINT_VINCENT_AND_THE_GRENADINES': {
-        'country_code': 'VC'
-    },
-    'SAMOA': {
-        'country_code': 'WS'
-    },
-    'SAN_MARINO': {
-        'country_code': 'SM'
-    },
-    'SAO_TOME_AND_PRINCIPE': {
-        'country_code': 'ST'
-    },
-    'SAUDI_ARABIA': {
-        'country_code': 'SA'
-    },
-    'SENEGAL': {
-        'country_code': 'SN'
-    },
-    'SERBIA': {
-        'country_code': 'RS'
-    },
-    'SEYCHELLES': {
-        'country_code': 'SC'
-    },
-    'SIERRA_LEONE': {
-        'country_code': 'SL'
-    },
-    'SINGAPORE': {
-        'country_code': 'SG'
-    },
-    'SINT_MAARTEN': {
-        'country_code': 'SX'
-    },
-    'SLOVAKIA': {
-        'country_code': 'SK'
-    },
-    'SLOVENIA': {
-        'country_code': 'SI'
-    },
-    'SOLOMON_ISLANDS': {
-        'country_code': 'SB'
-    },
-    'SOMALIA': {
-        'country_code': 'SO'
-    },
-    'SOUTH_AFRICA': {
-        'country_code': 'ZA'
-    },
-    'SOUTH_GEORGIA': {
-        'country_code': 'GS'
-    },
-    'SOUTH_SUDAN': {
-        'country_code': 'SS'
-    },
-    'SPAIN': {
-        'country_code': 'ES'
-    },
-    'SRI_LANKA': {
-        'country_code': 'LK'
-    },
-    'SUDAN': {
-        'country_code': 'SD'
-    },
-    'SURINAME': {
-        'country_code': 'SR'
-    },
-    'SVALBARD_AND_JAN_MAYEN': {
-        'country_code': 'SJ'
-    },
-    'SWAZILAND': {
-        'country_code': 'SZ'
-    },
-    'SWEDEN': {
-        'country_code': 'SE'
-    },
-    'SWITZERLAND': {
-        'country_code': 'CH'
-    },
-    'SYRIAN_ARAB_REPUBLIC': {
-        'country_code': 'SY'
-    },
-    'TAIWAN': {
-        'country_code': 'TW'
-    },
-    'TAJIKISTAN': {
-        'country_code': 'TJ'
-    },
-    'TANZANIA': {
-        'country_code': 'TZ'
-    },
-    'THAILAND': {
-        'country_code': 'TH'
-    },
-    'TIMOR-LESTE': {
-        'country_code': 'TL'
-    },
-    'TOGO': {
-        'country_code': 'TG'
-    },
-    'TOKELAU': {
-        'country_code': 'TK'
-    },
-    'TONGA': {
-        'country_code': 'TO'
-    },
-    'TRINIDAD_AND_TOBAGO': {
-        'country_code': 'TT'
-    },
-    'TUNISIA': {
-        'country_code': 'TN'
-    },
-    'TURKEY': {
-        'country_code': 'TR'
-    },
-    'TURKMENISTAN': {
-        'country_code': 'TM'
-    },
-    'TURKS_AND_CAICOS_ISLANDS': {
-        'country_code': 'TC'
-    },
-    'TUVALU': {
-        'country_code': 'TV'
-    },
-    'UGANDA': {
-        'country_code': 'UG'
-    },
-    'UKRAINE': {
-        'country_code': 'UA'
-    },
-    'UNITED_ARAB_EMIRATES': {
-        'country_code': 'AE'
-    },
-    'UNITED_KINGDOM': {
-        'country_code': 'GB'
-    },
-    'UNITED_STATES': {
-        'country_code': 'US'
-    },
-    'UNITED_STATES_MINOR_OUTLYING_ISLANDS': {
-        'country_code': 'UM'
-    },
-    'URUGUAY': {
-        'country_code': 'UY'
-    },
-    'UZBEKISTAN': {
-        'country_code': 'UZ'
-    },
-    'VANUATU': {
-        'country_code': 'VU'
-    },
-    'VENEZUELA': {
-        'country_code': 'VE'
-    },
-    'VIETNAM': {
-        'country_code': 'VN'
-    },
-    'VIRGIN_ISLANDS_BRITISH': {
-        'country_code': 'VG'
-    },
-    'VIRGIN_ISLANDS_US': {
-        'country_code': 'VI'
-    },
-    'WALLIS_AND_FUTUNA': {
-        'country_code': 'WF'
-    },
-    'WESTERN_SAHARA': {
-        'country_code': 'EH'
-    },
-    'YEMEN': {
-        'country_code': 'YE'
-    },
-    'ZAMBIA': {
-        'country_code': 'ZM'
-    },
-    'ZIMBABWE': {
-        'country_code': 'ZW'
-    },
-    'NON_COUNTRY': {
-        'country_code': 'XX'
-    }
+    "AFGHANISTAN": {"country_code": "AF"},
+    "ALAND_ISLANDS": {"country_code": "AX"},
+    "ALBANIA": {"country_code": "AL"},
+    "ALGERIA": {"country_code": "DZ"},
+    "AMERICAN_SAMOA": {"country_code": "AS"},
+    "ANDORRA": {"country_code": "AD"},
+    "ANGOLA": {"country_code": "AO"},
+    "ANGUILLA": {"country_code": "AI"},
+    "ANTARCTICA": {"country_code": "AQ"},
+    "ANTIGUA_AND_BARBUDA": {"country_code": "AG"},
+    "ARGENTINA": {"country_code": "AR"},
+    "ARMENIA": {"country_code": "AM"},
+    "ARUBA": {"country_code": "AW"},
+    "AUSTRALIA": {"country_code": "AU"},
+    "AUSTRIA": {"country_code": "AT"},
+    "AZERBAIJAN": {"country_code": "AZ"},
+    "BAHAMAS": {"country_code": "BS"},
+    "BAHRAIN": {"country_code": "BH"},
+    "BANGLADESH": {"country_code": "BD"},
+    "BARBADOS": {"country_code": "BB"},
+    "BELARUS": {"country_code": "BY"},
+    "BELGIUM": {"country_code": "BE"},
+    "BELIZE": {"country_code": "BZ"},
+    "BENIN": {"country_code": "BJ"},
+    "BERMUDA": {"country_code": "BM"},
+    "BHUTAN": {"country_code": "BT"},
+    "BOLIVIA": {"country_code": "BO"},
+    "BONAIRE": {"country_code": "BQ"},
+    "BOSNIA_AND_HERZEGOVINA": {"country_code": "BA"},
+    "BOTSWANA": {"country_code": "BW"},
+    "BOUVET_ISLAND": {"country_code": "BV"},
+    "BRAZIL": {"country_code": "BR"},
+    "BRITISH_INDIAN_OCEAN_TERRITORY": {"country_code": "IO"},
+    "BRUNEI_DARUSSALAM": {"country_code": "BN"},
+    "BULGARIA": {"country_code": "BG"},
+    "BURKINA_FASO": {"country_code": "BF"},
+    "BURUNDI": {"country_code": "BI"},
+    "CAMBODIA": {"country_code": "KH"},
+    "CAMEROON": {"country_code": "CM"},
+    "CANADA": {"country_code": "CA"},
+    "CAPE_VERDE": {"country_code": "CV"},
+    "CAYMAN_ISLANDS": {"country_code": "KY"},
+    "CENTRAL_AFRICAN_REPUBLIC": {"country_code": "CF"},
+    "CHAD": {"country_code": "TD"},
+    "CHILE": {"country_code": "CL"},
+    "CHINA": {"country_code": "CN"},
+    "CHRISTMAS_ISLAND": {"country_code": "CX"},
+    "COCOS_ISLANDS": {"country_code": "CC"},
+    "COLOMBIA": {"country_code": "CO"},
+    "COMOROS": {"country_code": "KM"},
+    "CONGO": {"country_code": "CG"},
+    "DEMOCRATIC_REPUBLIC_CONGO": {"country_code": "CD"},
+    "COOK_ISLANDS": {"country_code": "CK"},
+    "COSTA_RICA": {"country_code": "CR"},
+    "COTE_D_IVOIRE": {"country_code": "CI"},
+    "CROATIA": {"country_code": "HR"},
+    "CUBA": {"country_code": "CU"},
+    "CURACAO": {"country_code": "CW"},
+    "CYPRUS": {"country_code": "CY"},
+    "CZECH_REPUBLIC": {"country_code": "CZ"},
+    "DENMARK": {"country_code": "DK"},
+    "DJIBOUTI": {"country_code": "DJ"},
+    "DOMINICA": {"country_code": "DM"},
+    "DOMINICAN_REPUBLIC": {"country_code": "DO"},
+    "ECUADOR": {"country_code": "EC"},
+    "EGYPT": {"country_code": "EG"},
+    "EL_SALVADOR": {"country_code": "SV"},
+    "EQUATORIAL_GUINEA": {"country_code": "GQ"},
+    "ERITREA": {"country_code": "ER"},
+    "ESTONIA": {"country_code": "EE"},
+    "ETHIOPIA": {"country_code": "ET"},
+    "FALKLAND_ISLANDS_(MALVINAS)": {"country_code": "FK"},
+    "FAROE_ISLANDS": {"country_code": "FO"},
+    "FIJI": {"country_code": "FJ"},
+    "FINLAND": {"country_code": "FI"},
+    "FRANCE": {"country_code": "FR"},
+    "FRENCH_GUIANA": {"country_code": "GF"},
+    "FRENCH_POLYNESIA": {"country_code": "PF"},
+    "FRENCH_SOUTHERN_TERRITORIES": {"country_code": "TF"},
+    "GABON": {"country_code": "GA"},
+    "GAMBIA": {"country_code": "GM"},
+    "GEORGIA": {"country_code": "GE"},
+    "GERMANY": {"country_code": "DE"},
+    "GHANA": {"country_code": "GH"},
+    "GIBRALTAR": {"country_code": "GI"},
+    "GREECE": {"country_code": "GR"},
+    "GREENLAND": {"country_code": "GL"},
+    "GRENADA": {"country_code": "GD"},
+    "GUADELOUPE": {"country_code": "GP"},
+    "GUAM": {"country_code": "GU"},
+    "GUATEMALA": {"country_code": "GT"},
+    "GUERNSEY": {"country_code": "GG"},
+    "GUINEA": {"country_code": "GN"},
+    "GUINEA-BISSAU": {"country_code": "GW"},
+    "GUYANA": {"country_code": "GY"},
+    "HAITI": {"country_code": "HT"},
+    "HEARD_ISLAND_AND_MCDONALD_ISLANDS": {"country_code": "HM"},
+    "VATICAN_CITY_STATE": {"country_code": "VA"},
+    "HONDURAS": {"country_code": "HN"},
+    "HONG_KONG": {"country_code": "HK"},
+    "HUNGARY": {"country_code": "HU"},
+    "ICELAND": {"country_code": "IS"},
+    "INDIA": {"country_code": "IN"},
+    "INDONESIA": {"country_code": "ID"},
+    "IRAN": {"country_code": "IR"},
+    "IRAQ": {"country_code": "IQ"},
+    "IRELAND": {"country_code": "IE"},
+    "ISLE_OF_MAN": {"country_code": "IM"},
+    "ISRAEL": {"country_code": "IL"},
+    "ITALY": {"country_code": "IT"},
+    "JAMAICA": {"country_code": "JM"},
+    "JAPAN": {"country_code": "JP"},
+    "JERSEY": {"country_code": "JE"},
+    "JORDAN": {"country_code": "JO"},
+    "KAZAKHSTAN": {"country_code": "KZ"},
+    "KENYA": {"country_code": "KE"},
+    "KIRIBATI": {"country_code": "KI"},
+    "DEMOCRATIC_PEOPLE_S_REPUBLIC_OF_KOREA": {"country_code": "KP"},
+    "REPUBLIC_OF_KOREA": {"country_code": "KR"},
+    "KUWAIT": {"country_code": "KW"},
+    "KYRGYZSTAN": {"country_code": "KG"},
+    "LAO": {"country_code": "LA"},
+    "LATVIA": {"country_code": "LV"},
+    "LEBANON": {"country_code": "LB"},
+    "LESOTHO": {"country_code": "LS"},
+    "LIBERIA": {"country_code": "LR"},
+    "LIBYA": {"country_code": "LY"},
+    "LIECHTENSTEIN": {"country_code": "LI"},
+    "LITHUANIA": {"country_code": "LT"},
+    "LUXEMBOURG": {"country_code": "LU"},
+    "MACAO": {"country_code": "MO"},
+    "MACEDONIA": {"country_code": "MK"},
+    "MADAGASCAR": {"country_code": "MG"},
+    "MALAWI": {"country_code": "MW"},
+    "MALAYSIA": {"country_code": "MY"},
+    "MALDIVES": {"country_code": "MV"},
+    "MALI": {"country_code": "ML"},
+    "MALTA": {"country_code": "MT"},
+    "MARSHALL_ISLANDS": {"country_code": "MH"},
+    "MARTINIQUE": {"country_code": "MQ"},
+    "MAURITANIA": {"country_code": "MR"},
+    "MAURITIUS": {"country_code": "MU"},
+    "MAYOTTE": {"country_code": "YT"},
+    "MEXICO": {"country_code": "MX"},
+    "MICRONESIA": {"country_code": "FM"},
+    "MOLDOVA": {"country_code": "MD"},
+    "MONACO": {"country_code": "MC"},
+    "MONGOLIA": {"country_code": "MN"},
+    "MONTENEGRO": {"country_code": "ME"},
+    "MONTSERRAT": {"country_code": "MS"},
+    "MOROCCO": {"country_code": "MA"},
+    "MOZAMBIQUE": {"country_code": "MZ"},
+    "MYANMAR": {"country_code": "MM"},
+    "NAMIBIA": {"country_code": "NA"},
+    "NAURU": {"country_code": "NR"},
+    "NEPAL": {"country_code": "NP"},
+    "NETHERLANDS": {"country_code": "NL"},
+    "NEW_CALEDONIA": {"country_code": "NC"},
+    "NEW_ZEALAND": {"country_code": "NZ"},
+    "NICARAGUA": {"country_code": "NI"},
+    "NIGER": {"country_code": "NE"},
+    "NIGERIA": {"country_code": "NG"},
+    "NIUE": {"country_code": "NU"},
+    "NORFOLK_ISLAND": {"country_code": "NF"},
+    "NORTHERN_MARIANA_ISLANDS": {"country_code": "MP"},
+    "NORWAY": {"country_code": "NO"},
+    "OMAN": {"country_code": "OM"},
+    "PAKISTAN": {"country_code": "PK"},
+    "PALAU": {"country_code": "PW"},
+    "PALESTINE": {"country_code": "PS"},
+    "PANAMA": {"country_code": "PA"},
+    "PAPUA_NEW_GUINEA": {"country_code": "PG"},
+    "PARAGUAY": {"country_code": "PY"},
+    "PERU": {"country_code": "PE"},
+    "PHILIPPINES": {"country_code": "PH"},
+    "PITCAIRN": {"country_code": "PN"},
+    "POLAND": {"country_code": "PL"},
+    "PORTUGAL": {"country_code": "PT"},
+    "PUERTO_RICO": {"country_code": "PR"},
+    "QATAR": {"country_code": "QA"},
+    "RÉUNION": {"country_code": "RE"},
+    "ROMANIA": {"country_code": "RO"},
+    "RUSSIAN_FEDERATION": {"country_code": "RU"},
+    "RWANDA": {"country_code": "RW"},
+    "SAINT_BARTHELEMY": {"country_code": "BL"},
+    "SAINT_KITTS_AND_NEVIS": {"country_code": "KN"},
+    "SAINT_LUCIA": {"country_code": "LC"},
+    "SAINT_MARTIN": {"country_code": "MF"},
+    "SAINT_PIERRE_AND_MIQUELON": {"country_code": "PM"},
+    "SAINT_VINCENT_AND_THE_GRENADINES": {"country_code": "VC"},
+    "SAMOA": {"country_code": "WS"},
+    "SAN_MARINO": {"country_code": "SM"},
+    "SAO_TOME_AND_PRINCIPE": {"country_code": "ST"},
+    "SAUDI_ARABIA": {"country_code": "SA"},
+    "SENEGAL": {"country_code": "SN"},
+    "SERBIA": {"country_code": "RS"},
+    "SEYCHELLES": {"country_code": "SC"},
+    "SIERRA_LEONE": {"country_code": "SL"},
+    "SINGAPORE": {"country_code": "SG"},
+    "SINT_MAARTEN": {"country_code": "SX"},
+    "SLOVAKIA": {"country_code": "SK"},
+    "SLOVENIA": {"country_code": "SI"},
+    "SOLOMON_ISLANDS": {"country_code": "SB"},
+    "SOMALIA": {"country_code": "SO"},
+    "SOUTH_AFRICA": {"country_code": "ZA"},
+    "SOUTH_GEORGIA": {"country_code": "GS"},
+    "SOUTH_SUDAN": {"country_code": "SS"},
+    "SPAIN": {"country_code": "ES"},
+    "SRI_LANKA": {"country_code": "LK"},
+    "SUDAN": {"country_code": "SD"},
+    "SURINAME": {"country_code": "SR"},
+    "SVALBARD_AND_JAN_MAYEN": {"country_code": "SJ"},
+    "SWAZILAND": {"country_code": "SZ"},
+    "SWEDEN": {"country_code": "SE"},
+    "SWITZERLAND": {"country_code": "CH"},
+    "SYRIAN_ARAB_REPUBLIC": {"country_code": "SY"},
+    "TAIWAN": {"country_code": "TW"},
+    "TAJIKISTAN": {"country_code": "TJ"},
+    "TANZANIA": {"country_code": "TZ"},
+    "THAILAND": {"country_code": "TH"},
+    "TIMOR-LESTE": {"country_code": "TL"},
+    "TOGO": {"country_code": "TG"},
+    "TOKELAU": {"country_code": "TK"},
+    "TONGA": {"country_code": "TO"},
+    "TRINIDAD_AND_TOBAGO": {"country_code": "TT"},
+    "TUNISIA": {"country_code": "TN"},
+    "TURKEY": {"country_code": "TR"},
+    "TURKMENISTAN": {"country_code": "TM"},
+    "TURKS_AND_CAICOS_ISLANDS": {"country_code": "TC"},
+    "TUVALU": {"country_code": "TV"},
+    "UGANDA": {"country_code": "UG"},
+    "UKRAINE": {"country_code": "UA"},
+    "UNITED_ARAB_EMIRATES": {"country_code": "AE"},
+    "UNITED_KINGDOM": {"country_code": "GB"},
+    "UNITED_STATES": {"country_code": "US"},
+    "UNITED_STATES_MINOR_OUTLYING_ISLANDS": {"country_code": "UM"},
+    "URUGUAY": {"country_code": "UY"},
+    "UZBEKISTAN": {"country_code": "UZ"},
+    "VANUATU": {"country_code": "VU"},
+    "VENEZUELA": {"country_code": "VE"},
+    "VIETNAM": {"country_code": "VN"},
+    "VIRGIN_ISLANDS_BRITISH": {"country_code": "VG"},
+    "VIRGIN_ISLANDS_US": {"country_code": "VI"},
+    "WALLIS_AND_FUTUNA": {"country_code": "WF"},
+    "WESTERN_SAHARA": {"country_code": "EH"},
+    "YEMEN": {"country_code": "YE"},
+    "ZAMBIA": {"country_code": "ZM"},
+    "ZIMBABWE": {"country_code": "ZW"},
+    "NON_COUNTRY": {"country_code": "XX"},
 }
 
 ALL_CHANNELS_2G = {
@@ -1366,7 +854,7 @@
     11: {20, 40},
     12: {20, 40},
     13: {20, 40},
-    14: {20}
+    14: {20},
 }
 
 ALL_CHANNELS_5G = {
@@ -1394,7 +882,7 @@
     153: {20, 40, 80},
     157: {20, 40, 80},
     161: {20, 40, 80},
-    165: {20}
+    165: {20},
 }
 
 ALL_CHANNELS = {**ALL_CHANNELS_2G, **ALL_CHANNELS_5G}
@@ -1403,6 +891,7 @@
 @unique
 class WnmFeature(Enum):
     """Wireless Network Management (AKA 802.11v) features hostapd supports."""
+
     TIME_ADVERTISEMENT = auto()
     WNM_SLEEP_MODE = auto()
     BSS_TRANSITION_MANAGEMENT = auto()
diff --git a/src/antlion/controllers/ap_lib/hostapd_security.py b/src/antlion/controllers/ap_lib/hostapd_security.py
index 372ca44..69d5c2f 100644
--- a/src/antlion/controllers/ap_lib/hostapd_security.py
+++ b/src/antlion/controllers/ap_lib/hostapd_security.py
@@ -15,24 +15,29 @@
 import collections
 import string
 
+from typing import Dict, Optional, Union
+
 from antlion.controllers.ap_lib import hostapd_constants
 
 
 class Security(object):
     """The Security class for hostapd representing some of the security
-       settings that are allowed in hostapd.  If needed more can be added.
+    settings that are allowed in hostapd.  If needed more can be added.
     """
-    def __init__(self,
-                 security_mode=None,
-                 password=None,
-                 wpa_cipher=hostapd_constants.WPA_DEFAULT_CIPHER,
-                 wpa2_cipher=hostapd_constants.WPA2_DEFAULT_CIPER,
-                 wpa_group_rekey=hostapd_constants.WPA_GROUP_KEY_ROTATION_TIME,
-                 wpa_strict_rekey=hostapd_constants.WPA_STRICT_REKEY_DEFAULT,
-                 wep_default_key=hostapd_constants.WEP_DEFAULT_KEY,
-                 radius_server_ip=None,
-                 radius_server_port=None,
-                 radius_server_secret=None):
+
+    def __init__(
+        self,
+        security_mode: Optional[str] = None,
+        password: Optional[str] = None,
+        wpa_cipher: str = hostapd_constants.WPA_DEFAULT_CIPHER,
+        wpa2_cipher: str = hostapd_constants.WPA2_DEFAULT_CIPER,
+        wpa_group_rekey: int = hostapd_constants.WPA_GROUP_KEY_ROTATION_TIME,
+        wpa_strict_rekey: bool = hostapd_constants.WPA_STRICT_REKEY_DEFAULT,
+        wep_default_key: int = hostapd_constants.WEP_DEFAULT_KEY,
+        radius_server_ip: Optional[str] = None,
+        radius_server_port: Optional[int] = None,
+        radius_server_secret: Optional[str] = None,
+    ) -> None:
         """Gather all of the security settings for WPA-PSK.  This could be
            expanded later.
 
@@ -70,63 +75,80 @@
         self.radius_server_port = radius_server_port
         self.radius_server_secret = radius_server_secret
         self.security_mode = hostapd_constants.SECURITY_STRING_TO_SECURITY_MODE_INT.get(
-            security_mode, None)
+            security_mode, None
+        )
         if password:
             if self.security_mode == hostapd_constants.WEP:
                 if len(password) in hostapd_constants.WEP_STR_LENGTH:
                     self.password = '"%s"' % password
                 elif len(password) in hostapd_constants.WEP_HEX_LENGTH and all(
-                        c in string.hexdigits for c in password):
+                    c in string.hexdigits for c in password
+                ):
                     self.password = password
                 else:
                     raise ValueError(
-                        'WEP key must be a hex string of %s characters' %
-                        hostapd_constants.WEP_HEX_LENGTH)
+                        "WEP key must be a hex string of %s characters"
+                        % hostapd_constants.WEP_HEX_LENGTH
+                    )
             else:
-                if len(password) < hostapd_constants.MIN_WPA_PSK_LENGTH or len(
-                        password) > hostapd_constants.MAX_WPA_PSK_LENGTH:
+                if (
+                    len(password) < hostapd_constants.MIN_WPA_PSK_LENGTH
+                    or len(password) > hostapd_constants.MAX_WPA_PSK_LENGTH
+                ):
                     raise ValueError(
-                        'Password must be a minumum of %s characters and a maximum of %s'
-                        % (hostapd_constants.MIN_WPA_PSK_LENGTH,
-                           hostapd_constants.MAX_WPA_PSK_LENGTH))
+                        "Password must be a minumum of %s characters and a maximum of %s"
+                        % (
+                            hostapd_constants.MIN_WPA_PSK_LENGTH,
+                            hostapd_constants.MAX_WPA_PSK_LENGTH,
+                        )
+                    )
                 else:
                     self.password = password
 
-    def generate_dict(self):
+    def generate_dict(self) -> Dict[str, Union[str, int]]:
         """Returns: an ordered dictionary of settings"""
         settings = collections.OrderedDict()
         if self.security_mode is not None:
             if self.security_mode == hostapd_constants.WEP:
-                settings['wep_default_key'] = self.wep_default_key
-                settings['wep_key' + str(self.wep_default_key)] = self.password
+                settings["wep_default_key"] = self.wep_default_key
+                settings["wep_key" + str(self.wep_default_key)] = self.password
             elif self.security_mode == hostapd_constants.ENT:
-                settings['auth_server_addr'] = self.radius_server_ip
-                settings['auth_server_port'] = self.radius_server_port
-                settings[
-                    'auth_server_shared_secret'] = self.radius_server_secret
-                settings['wpa_key_mgmt'] = hostapd_constants.ENT_KEY_MGMT
-                settings['ieee8021x'] = hostapd_constants.IEEE8021X
-                settings['wpa'] = hostapd_constants.WPA2
+                settings["auth_server_addr"] = self.radius_server_ip
+                settings["auth_server_port"] = self.radius_server_port
+                settings["auth_server_shared_secret"] = self.radius_server_secret
+                settings["wpa_key_mgmt"] = hostapd_constants.ENT_KEY_MGMT
+                settings["ieee8021x"] = hostapd_constants.IEEE8021X
+                settings["wpa"] = hostapd_constants.WPA2
             else:
-                settings['wpa'] = self.security_mode
+                settings["wpa"] = self.security_mode
                 if len(self.password) == hostapd_constants.MAX_WPA_PSK_LENGTH:
-                    settings['wpa_psk'] = self.password
+                    settings["wpa_psk"] = self.password
                 else:
-                    settings['wpa_passphrase'] = self.password
+                    settings["wpa_passphrase"] = self.password
                 # For wpa, wpa/wpa2, and wpa/wpa2/wpa3, add wpa_pairwise
-                if self.security_mode == hostapd_constants.WPA1 or self.security_mode == hostapd_constants.MIXED:
-                    settings['wpa_pairwise'] = self.wpa_cipher
+                if (
+                    self.security_mode == hostapd_constants.WPA1
+                    or self.security_mode == hostapd_constants.MIXED
+                ):
+                    settings["wpa_pairwise"] = self.wpa_cipher
                 # For wpa/wpa2, wpa2, wpa3, and wpa2/wpa3, and wpa/wpa2, wpa3, add rsn_pairwise
-                if self.security_mode == hostapd_constants.WPA2 or self.security_mode == hostapd_constants.MIXED:
-                    settings['rsn_pairwise'] = self.wpa2_cipher
+                if (
+                    self.security_mode == hostapd_constants.WPA2
+                    or self.security_mode == hostapd_constants.MIXED
+                ):
+                    settings["rsn_pairwise"] = self.wpa2_cipher
                 # Add wpa_key_mgmt based on security mode string
-                if self.security_mode_string in hostapd_constants.SECURITY_STRING_TO_WPA_KEY_MGMT:
+                if (
+                    self.security_mode_string
+                    in hostapd_constants.SECURITY_STRING_TO_WPA_KEY_MGMT
+                ):
                     settings[
-                        'wpa_key_mgmt'] = hostapd_constants.SECURITY_STRING_TO_WPA_KEY_MGMT[
-                            self.security_mode_string]
+                        "wpa_key_mgmt"
+                    ] = hostapd_constants.SECURITY_STRING_TO_WPA_KEY_MGMT[
+                        self.security_mode_string
+                    ]
                 if self.wpa_group_rekey:
-                    settings['wpa_group_rekey'] = self.wpa_group_rekey
+                    settings["wpa_group_rekey"] = self.wpa_group_rekey
                 if self.wpa_strict_rekey:
-                    settings[
-                        'wpa_strict_rekey'] = hostapd_constants.WPA_STRICT_REKEY
+                    settings["wpa_strict_rekey"] = hostapd_constants.WPA_STRICT_REKEY
         return settings
diff --git a/src/antlion/controllers/ap_lib/hostapd_utils.py b/src/antlion/controllers/ap_lib/hostapd_utils.py
index 3387ed1..82331bf 100644
--- a/src/antlion/controllers/ap_lib/hostapd_utils.py
+++ b/src/antlion/controllers/ap_lib/hostapd_utils.py
@@ -48,9 +48,9 @@
         valid_interfaces: list of valid interface names
     """
     if not interface:
-        raise ValueError('Required wlan interface is missing.')
+        raise ValueError("Required wlan interface is missing.")
     if interface not in valid_interfaces:
-        raise ValueError('Invalid interface name was passed: %s' % interface)
+        raise ValueError("Invalid interface name was passed: %s" % interface)
 
 
 def verify_security_mode(security_profile, valid_security_modes):
@@ -63,12 +63,13 @@
     """
     if security_profile is None:
         if None not in valid_security_modes:
-            raise ValueError('Open security is not allowed for this profile.')
+            raise ValueError("Open security is not allowed for this profile.")
     elif security_profile.security_mode not in valid_security_modes:
         raise ValueError(
-            'Invalid Security Mode: %s. '
-            'Valid Security Modes for this profile: %s.' %
-            (security_profile.security_mode, valid_security_modes))
+            "Invalid Security Mode: %s. "
+            "Valid Security Modes for this profile: %s."
+            % (security_profile.security_mode, valid_security_modes)
+        )
 
 
 def verify_cipher(security_profile, valid_ciphers):
@@ -79,17 +80,20 @@
         valid_ciphers: a list of valid ciphers for a profile.
     """
     if security_profile is None:
-        raise ValueError('Security mode is open.')
+        raise ValueError("Security mode is open.")
     elif security_profile.security_mode == hostapd_constants.WPA1:
         if security_profile.wpa_cipher not in valid_ciphers:
-            raise ValueError('Invalid WPA Cipher: %s. '
-                             'Valid WPA Ciphers for this profile: %s' %
-                             (security_profile.wpa_cipher, valid_ciphers))
+            raise ValueError(
+                "Invalid WPA Cipher: %s. "
+                "Valid WPA Ciphers for this profile: %s"
+                % (security_profile.wpa_cipher, valid_ciphers)
+            )
     elif security_profile.security_mode == hostapd_constants.WPA2:
         if security_profile.wpa2_cipher not in valid_ciphers:
-            raise ValueError('Invalid WPA2 Cipher: %s. '
-                             'Valid WPA2 Ciphers for this profile: %s' %
-                             (security_profile.wpa2_cipher, valid_ciphers))
+            raise ValueError(
+                "Invalid WPA2 Cipher: %s. "
+                "Valid WPA2 Ciphers for this profile: %s"
+                % (security_profile.wpa2_cipher, valid_ciphers)
+            )
     else:
-        raise ValueError('Invalid Security Mode: %s' %
-                         security_profile.security_mode)
+        raise ValueError("Invalid Security Mode: %s" % security_profile.security_mode)
diff --git a/src/antlion/controllers/ap_lib/radio_measurement.py b/src/antlion/controllers/ap_lib/radio_measurement.py
index 254adc5..5c7f2e0 100644
--- a/src/antlion/controllers/ap_lib/radio_measurement.py
+++ b/src/antlion/controllers/ap_lib/radio_measurement.py
@@ -23,6 +23,7 @@
 
     See IEEE 802.11-2020 Figure 9-172.
     """
+
     NOT_REACHABLE = 1
     UNKNOWN = 2
     REACHABLE = 3
@@ -34,11 +35,13 @@
     See IEEE 802.11-2020 Figure 9-338 and 9.4.1.4.
     """
 
-    def __init__(self,
-                 spectrum_management: bool = False,
-                 qos: bool = False,
-                 apsd: bool = False,
-                 radio_measurement: bool = False):
+    def __init__(
+        self,
+        spectrum_management: bool = False,
+        qos: bool = False,
+        apsd: bool = False,
+        radio_measurement: bool = False,
+    ):
         """Create a capabilities object.
 
         Args:
@@ -54,10 +57,12 @@
 
     def __index__(self) -> int:
         """Convert to numeric representation of the field's bits."""
-        return self.spectrum_management << 5 \
-            | self.qos << 4 \
-            | self.apsd << 3 \
+        return (
+            self.spectrum_management << 5
+            | self.qos << 4
+            | self.apsd << 3
             | self.radio_measurement << 2
+        )
 
     @property
     def spectrum_management(self) -> bool:
@@ -83,16 +88,17 @@
     neighbor report element. See IEEE 802.11-2020 Figure 9-337.
     """
 
-    def __init__(self,
-                 ap_reachability: ApReachability = ApReachability.UNKNOWN,
-                 security: bool = False,
-                 key_scope: bool = False,
-                 capabilities:
-                 BssidInformationCapabilities = BssidInformationCapabilities(),
-                 mobility_domain: bool = False,
-                 high_throughput: bool = False,
-                 very_high_throughput: bool = False,
-                 ftm: bool = False):
+    def __init__(
+        self,
+        ap_reachability: ApReachability = ApReachability.UNKNOWN,
+        security: bool = False,
+        key_scope: bool = False,
+        capabilities: BssidInformationCapabilities = BssidInformationCapabilities(),
+        mobility_domain: bool = False,
+        high_throughput: bool = False,
+        very_high_throughput: bool = False,
+        ftm: bool = False,
+    ):
         """Create a BSSID Information object for a neighboring AP.
 
         Args:
@@ -128,14 +134,16 @@
 
     def __index__(self) -> int:
         """Convert to numeric representation of the field's bits."""
-        return self._ap_reachability << 30 \
-            | self.security << 29 \
-            | self.key_scope << 28 \
-            | int(self.capabilities) << 22 \
-            | self.mobility_domain << 21 \
-            | self.high_throughput << 20 \
-            | self.very_high_throughput << 19 \
+        return (
+            self._ap_reachability << 30
+            | self.security << 29
+            | self.key_scope << 28
+            | int(self.capabilities) << 22
+            | self.mobility_domain << 21
+            | self.high_throughput << 20
+            | self.very_high_throughput << 19
             | self.ftm << 18
+        )
 
     @property
     def security(self) -> bool:
@@ -169,6 +177,7 @@
 @unique
 class PhyType(IntEnum):
     """PHY type values, see dot11PhyType in 802.11-2020 Annex C."""
+
     DSSS = 2
     OFDM = 4
     HRDSS = 5
@@ -188,8 +197,14 @@
     See IEEE 802.11-2020 9.4.2.36.
     """
 
-    def __init__(self, bssid: str, bssid_information: BssidInformation,
-                 operating_class: int, channel_number: int, phy_type: PhyType):
+    def __init__(
+        self,
+        bssid: str,
+        bssid_information: BssidInformation,
+        operating_class: int,
+        channel_number: int,
+        phy_type: PhyType,
+    ):
         """Create a neighbor report element.
 
         Args:
diff --git a/src/antlion/controllers/ap_lib/radvd.py b/src/antlion/controllers/ap_lib/radvd.py
index 9761c44..216ad0e 100644
--- a/src/antlion/controllers/ap_lib/radvd.py
+++ b/src/antlion/controllers/ap_lib/radvd.py
@@ -17,6 +17,9 @@
 import tempfile
 import time
 
+from typing import Any, Optional
+
+from antlion.controllers.ap_lib.radvd_config import RadvdConfig
 from antlion.controllers.utils_lib.commands import shell
 from antlion.libs.proc import job
 
@@ -35,38 +38,45 @@
     Attributes:
         config: The radvd configuration that is being used.
     """
-    def __init__(self, runner, interface, working_dir=None, radvd_binary=None):
+
+    def __init__(
+        self,
+        runner: Any,
+        interface: str,
+        working_dir: Optional[str] = None,
+        radvd_binary: Optional[str] = None,
+    ) -> None:
         """
         Args:
             runner: Object that has run_async and run methods for executing
                     shell commands (e.g. connection.SshConnection)
-            interface: string, The name of the interface to use (eg. wlan0).
-            working_dir: The directory to work out of.
+            interface: Name of the interface to use (eg. wlan0).
+            working_dir: Directory to work out of.
             radvd_binary: Location of the radvd binary
         """
         if not radvd_binary:
-            logging.debug('No radvd binary specified.  '
-                          'Assuming radvd is in the path.')
-            radvd_binary = 'radvd'
+            logging.debug(
+                "No radvd binary specified.  " "Assuming radvd is in the path."
+            )
+            radvd_binary = "radvd"
         else:
-            logging.debug('Using radvd binary located at %s' % radvd_binary)
+            logging.debug(f"Using radvd binary located at {radvd_binary}")
         if working_dir is None and runner == job.run:
             working_dir = tempfile.gettempdir()
         else:
-            working_dir = '/tmp'
+            working_dir = "/tmp"
         self._radvd_binary = radvd_binary
         self._runner = runner
         self._interface = interface
         self._working_dir = working_dir
-        self.config = None
+        self.config: Optional[RadvdConfig] = None
         self._shell = shell.ShellCommand(runner, working_dir)
-        self._log_file = '%s/radvd-%s.log' % (working_dir, self._interface)
-        self._config_file = '%s/radvd-%s.conf' % (working_dir, self._interface)
-        self._pid_file = '%s/radvd-%s.pid' % (working_dir, self._interface)
-        self._ps_identifier = '%s.*%s' % (self._radvd_binary,
-                                          self._config_file)
+        self._log_file = f"{working_dir}/radvd-{self._interface}.log"
+        self._config_file = f"{working_dir}/radvd-{self._interface}.conf"
+        self._pid_file = f"{working_dir}/radvd-{self._interface}.pid"
+        self._ps_identifier = f"{self._radvd_binary}.*{self._config_file}"
 
-    def start(self, config, timeout=60):
+    def start(self, config: RadvdConfig, timeout: int = 60) -> None:
         """Starts radvd
 
         Starts the radvd daemon and runs it in the background.
@@ -91,11 +101,12 @@
         self._shell.delete_file(self._config_file)
         self._write_configs(self.config)
 
-        radvd_command = '%s -C %s -p %s -m logfile -d 5 -l %s' % (
-            self._radvd_binary, shlex.quote(self._config_file),
-            shlex.quote(self._pid_file), self._log_file)
-        job_str = '%s > "%s" 2>&1' % (radvd_command, self._log_file)
-        self._runner.run_async(job_str)
+        command = (
+            f"{self._radvd_binary} -C {shlex.quote(self._config_file)} "
+            f"-p {shlex.quote(self._pid_file)} -m logfile -d 5 "
+            f'-l {self._log_file} > "{self._log_file}" 2>&1'
+        )
+        self._runner.run_async(command)
 
         try:
             self._wait_for_process(timeout=timeout)
@@ -114,7 +125,7 @@
         """
         return self._shell.is_alive(self._ps_identifier)
 
-    def pull_logs(self):
+    def pull_logs(self) -> str:
         """Pulls the log files from where radvd is running.
 
         Returns:
@@ -123,7 +134,7 @@
         # TODO: Auto pulling of logs when stop is called.
         return self._shell.read_file(self._log_file)
 
-    def _wait_for_process(self, timeout=60):
+    def _wait_for_process(self, timeout: int = 60) -> None:
         """Waits for the process to come up.
 
         Waits until the radvd process is found running, or there is
@@ -138,7 +149,7 @@
             self._scan_for_errors(False)
         self._scan_for_errors(True)
 
-    def _scan_for_errors(self, should_be_up):
+    def _scan_for_errors(self, should_be_up: bool) -> None:
         """Scans the radvd log for any errors.
 
         Args:
@@ -152,13 +163,13 @@
         # Store this so that all other errors have priority.
         is_dead = not self.is_alive()
 
-        exited_prematurely = self._shell.search_file('Exiting', self._log_file)
+        exited_prematurely = self._shell.search_file("Exiting", self._log_file)
         if exited_prematurely:
-            raise Error('Radvd exited prematurely.', self)
+            raise Error("Radvd exited prematurely.", self)
         if should_be_up and is_dead:
-            raise Error('Radvd failed to start', self)
+            raise Error("Radvd failed to start", self)
 
-    def _write_configs(self, config):
+    def _write_configs(self, config: RadvdConfig) -> None:
         """Writes the configs to the radvd config file.
 
         Args:
@@ -166,38 +177,39 @@
         """
         self._shell.delete_file(self._config_file)
         conf = config.package_configs()
-        lines = ['interface %s {' % self._interface]
-        for (interface_option_key,
-             interface_option) in conf['interface_options'].items():
-            lines.append('\t%s %s;' %
-                         (str(interface_option_key), str(interface_option)))
-        lines.append('\tprefix %s' % conf['prefix'])
-        lines.append('\t{')
-        for prefix_option in conf['prefix_options'].items():
-            lines.append('\t\t%s;' % ' '.join(map(str, prefix_option)))
-        lines.append('\t};')
-        if conf['clients']:
-            lines.append('\tclients')
-            lines.append('\t{')
-            for client in conf['clients']:
-                lines.append('\t\t%s;' % client)
-            lines.append('\t};')
-        if conf['route']:
-            lines.append('\troute %s {' % conf['route'])
-            for route_option in conf['route_options'].items():
-                lines.append('\t\t%s;' % ' '.join(map(str, route_option)))
-            lines.append('\t};')
-        if conf['rdnss']:
-            lines.append('\tRDNSS %s {' %
-                         ' '.join([str(elem) for elem in conf['rdnss']]))
-            for rdnss_option in conf['rdnss_options'].items():
-                lines.append('\t\t%s;' % ' '.join(map(str, rdnss_option)))
-            lines.append('\t};')
-        lines.append('};')
-        output_config = '\n'.join(lines)
-        logging.info('Writing %s' % self._config_file)
-        logging.debug('******************Start*******************')
-        logging.debug('\n%s' % output_config)
-        logging.debug('*******************End********************')
+        lines = ["interface %s {" % self._interface]
+        for interface_option_key, interface_option in conf["interface_options"].items():
+            lines.append(
+                "\t%s %s;" % (str(interface_option_key), str(interface_option))
+            )
+        lines.append("\tprefix %s" % conf["prefix"])
+        lines.append("\t{")
+        for prefix_option in conf["prefix_options"].items():
+            lines.append("\t\t%s;" % " ".join(map(str, prefix_option)))
+        lines.append("\t};")
+        if conf["clients"]:
+            lines.append("\tclients")
+            lines.append("\t{")
+            for client in conf["clients"]:
+                lines.append("\t\t%s;" % client)
+            lines.append("\t};")
+        if conf["route"]:
+            lines.append("\troute %s {" % conf["route"])
+            for route_option in conf["route_options"].items():
+                lines.append("\t\t%s;" % " ".join(map(str, route_option)))
+            lines.append("\t};")
+        if conf["rdnss"]:
+            lines.append(
+                "\tRDNSS %s {" % " ".join([str(elem) for elem in conf["rdnss"]])
+            )
+            for rdnss_option in conf["rdnss_options"].items():
+                lines.append("\t\t%s;" % " ".join(map(str, rdnss_option)))
+            lines.append("\t};")
+        lines.append("};")
+        output_config = "\n".join(lines)
+        logging.info("Writing %s" % self._config_file)
+        logging.debug("******************Start*******************")
+        logging.debug("\n%s" % output_config)
+        logging.debug("*******************End********************")
 
         self._shell.write_file(self._config_file, output_config)
diff --git a/src/antlion/controllers/ap_lib/radvd_config.py b/src/antlion/controllers/ap_lib/radvd_config.py
index f8e583e..647df82 100644
--- a/src/antlion/controllers/ap_lib/radvd_config.py
+++ b/src/antlion/controllers/ap_lib/radvd_config.py
@@ -12,6 +12,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from typing import Any, List, Optional
+
 from antlion.controllers.ap_lib import radvd_constants
 
 import collections
@@ -23,43 +25,45 @@
     All the settings for a router advertisement daemon.
     """
 
-    def __init__(self,
-                 prefix=radvd_constants.DEFAULT_PREFIX,
-                 clients=[],
-                 route=None,
-                 rdnss=[],
-                 ignore_if_missing=None,
-                 adv_send_advert=radvd_constants.ADV_SEND_ADVERT_ON,
-                 unicast_only=None,
-                 max_rtr_adv_interval=None,
-                 min_rtr_adv_interval=None,
-                 min_delay_between_ras=None,
-                 adv_managed_flag=None,
-                 adv_other_config_flag=None,
-                 adv_link_mtu=None,
-                 adv_reachable_time=None,
-                 adv_retrans_timer=None,
-                 adv_cur_hop_limit=None,
-                 adv_default_lifetime=None,
-                 adv_default_preference=None,
-                 adv_source_ll_address=None,
-                 adv_home_agent_flag=None,
-                 adv_home_agent_info=None,
-                 home_agent_lifetime=None,
-                 home_agent_preference=None,
-                 adv_mob_rtr_support_flag=None,
-                 adv_interval_opt=None,
-                 adv_on_link=radvd_constants.ADV_ON_LINK_ON,
-                 adv_autonomous=radvd_constants.ADV_AUTONOMOUS_ON,
-                 adv_router_addr=None,
-                 adv_valid_lifetime=None,
-                 adv_preferred_lifetime=None,
-                 base_6to4_interface=None,
-                 adv_route_lifetime=None,
-                 adv_route_preference=None,
-                 adv_rdnss_preference=None,
-                 adv_rdnss_open=None,
-                 adv_rdnss_lifetime=None):
+    def __init__(
+        self,
+        prefix: str = radvd_constants.DEFAULT_PREFIX,
+        clients: List[str] = [],  # NOTE(review): mutable default argument — shared across calls; prefer a None sentinel
+        route: Optional[Any] = None,
+        rdnss: List[str] = [],  # NOTE(review): mutable default argument — shared across calls; prefer a None sentinel
+        ignore_if_missing: Optional[str] = None,
+        adv_send_advert: str = radvd_constants.ADV_SEND_ADVERT_ON,
+        unicast_only: Optional[str] = None,
+        max_rtr_adv_interval: Optional[int] = None,
+        min_rtr_adv_interval: Optional[int] = None,
+        min_delay_between_ras: Optional[int] = None,
+        adv_managed_flag: Optional[str] = None,
+        adv_other_config_flag: Optional[str] = None,
+        adv_link_mtu: Optional[int] = None,
+        adv_reachable_time: Optional[int] = None,
+        adv_retrans_timer: Optional[int] = None,
+        adv_cur_hop_limit: Optional[int] = None,
+        adv_default_lifetime: Optional[int] = None,
+        adv_default_preference: Optional[str] = None,
+        adv_source_ll_address: Optional[str] = None,
+        adv_home_agent_flag: Optional[str] = None,
+        adv_home_agent_info: Optional[str] = None,
+        home_agent_lifetime: Optional[int] = None,
+        home_agent_preference: Optional[int] = None,
+        adv_mob_rtr_support_flag: Optional[str] = None,
+        adv_interval_opt: Optional[str] = None,
+        adv_on_link: str = radvd_constants.ADV_ON_LINK_ON,
+        adv_autonomous: str = radvd_constants.ADV_AUTONOMOUS_ON,
+        adv_router_addr: Optional[str] = None,
+        adv_valid_lifetime: Optional[int] = None,
+        adv_preferred_lifetime: Optional[int] = None,
+        base_6to4_interface: Optional[str] = None,
+        adv_route_lifetime: Optional[int] = None,
+        adv_route_preference: Optional[str] = None,
+        adv_rdnss_preference: Optional[int] = None,
+        adv_rdnss_open: Optional[str] = None,
+        adv_rdnss_lifetime: Optional[int] = None,
+    ) -> None:
         """Construct a RadvdConfig.
 
         Args:
@@ -238,53 +242,73 @@
 
     def package_configs(self):
         conf = dict()
-        conf['prefix'] = self._prefix
-        conf['clients'] = self._clients
-        conf['route'] = self._route
-        conf['rdnss'] = self._rdnss
+        conf["prefix"] = self._prefix
+        conf["clients"] = self._clients
+        conf["route"] = self._route
+        conf["rdnss"] = self._rdnss
 
-        conf['interface_options'] = collections.OrderedDict(
-            filter(lambda pair: pair[1] is not None,
-                   (('IgnoreIfMissing', self._ignore_if_missing),
-                    ('AdvSendAdvert', self._adv_send_advert),
-                    ('UnicastOnly', self._unicast_only),
-                    ('MaxRtrAdvInterval', self._max_rtr_adv_interval),
-                    ('MinRtrAdvInterval', self._min_rtr_adv_interval),
-                    ('MinDelayBetweenRAs', self._min_delay_between_ras),
-                    ('AdvManagedFlag', self._adv_managed_flag),
-                    ('AdvOtherConfigFlag', self._adv_other_config_flag),
-                    ('AdvLinkMTU', self._adv_link_mtu),
-                    ('AdvReachableTime', self._adv_reachable_time),
-                    ('AdvRetransTimer', self._adv_retrans_timer),
-                    ('AdvCurHopLimit', self._adv_cur_hop_limit),
-                    ('AdvDefaultLifetime', self._adv_default_lifetime),
-                    ('AdvDefaultPreference', self._adv_default_preference),
-                    ('AdvSourceLLAddress', self._adv_source_ll_address),
-                    ('AdvHomeAgentFlag', self._adv_home_agent_flag),
-                    ('AdvHomeAgentInfo', self._adv_home_agent_info),
-                    ('HomeAgentLifetime', self._home_agent_lifetime),
-                    ('HomeAgentPreference', self._home_agent_preference),
-                    ('AdvMobRtrSupportFlag', self._adv_mob_rtr_support_flag),
-                    ('AdvIntervalOpt', self._adv_interval_opt))))
+        conf["interface_options"] = collections.OrderedDict(
+            filter(
+                lambda pair: pair[1] is not None,
+                (
+                    ("IgnoreIfMissing", self._ignore_if_missing),
+                    ("AdvSendAdvert", self._adv_send_advert),
+                    ("UnicastOnly", self._unicast_only),
+                    ("MaxRtrAdvInterval", self._max_rtr_adv_interval),
+                    ("MinRtrAdvInterval", self._min_rtr_adv_interval),
+                    ("MinDelayBetweenRAs", self._min_delay_between_ras),
+                    ("AdvManagedFlag", self._adv_managed_flag),
+                    ("AdvOtherConfigFlag", self._adv_other_config_flag),
+                    ("AdvLinkMTU", self._adv_link_mtu),
+                    ("AdvReachableTime", self._adv_reachable_time),
+                    ("AdvRetransTimer", self._adv_retrans_timer),
+                    ("AdvCurHopLimit", self._adv_cur_hop_limit),
+                    ("AdvDefaultLifetime", self._adv_default_lifetime),
+                    ("AdvDefaultPreference", self._adv_default_preference),
+                    ("AdvSourceLLAddress", self._adv_source_ll_address),
+                    ("AdvHomeAgentFlag", self._adv_home_agent_flag),
+                    ("AdvHomeAgentInfo", self._adv_home_agent_info),
+                    ("HomeAgentLifetime", self._home_agent_lifetime),
+                    ("HomeAgentPreference", self._home_agent_preference),
+                    ("AdvMobRtrSupportFlag", self._adv_mob_rtr_support_flag),
+                    ("AdvIntervalOpt", self._adv_interval_opt),
+                ),
+            )
+        )
 
-        conf['prefix_options'] = collections.OrderedDict(
-            filter(lambda pair: pair[1] is not None,
-                   (('AdvOnLink', self._adv_on_link),
-                    ('AdvAutonomous', self._adv_autonomous),
-                    ('AdvRouterAddr', self._adv_router_addr),
-                    ('AdvValidLifetime', self._adv_valid_lifetime),
-                    ('AdvPreferredLifetime', self._adv_preferred_lifetime),
-                    ('Base6to4Interface', self._base_6to4_interface))))
+        conf["prefix_options"] = collections.OrderedDict(
+            filter(
+                lambda pair: pair[1] is not None,
+                (
+                    ("AdvOnLink", self._adv_on_link),
+                    ("AdvAutonomous", self._adv_autonomous),
+                    ("AdvRouterAddr", self._adv_router_addr),
+                    ("AdvValidLifetime", self._adv_valid_lifetime),
+                    ("AdvPreferredLifetime", self._adv_preferred_lifetime),
+                    ("Base6to4Interface", self._base_6to4_interface),
+                ),
+            )
+        )
 
-        conf['route_options'] = collections.OrderedDict(
-            filter(lambda pair: pair[1] is not None,
-                   (('AdvRouteLifetime', self._adv_route_lifetime),
-                    ('AdvRoutePreference', self._adv_route_preference))))
+        conf["route_options"] = collections.OrderedDict(
+            filter(
+                lambda pair: pair[1] is not None,
+                (
+                    ("AdvRouteLifetime", self._adv_route_lifetime),
+                    ("AdvRoutePreference", self._adv_route_preference),
+                ),
+            )
+        )
 
-        conf['rdnss_options'] = collections.OrderedDict(
-            filter(lambda pair: pair[1] is not None,
-                   (('AdvRDNSSPreference', self._adv_rdnss_preference),
-                    ('AdvRDNSSOpen', self._adv_rdnss_open),
-                    ('AdvRDNSSLifetime', self._adv_rdnss_lifetime))))
+        conf["rdnss_options"] = collections.OrderedDict(
+            filter(
+                lambda pair: pair[1] is not None,
+                (
+                    ("AdvRDNSSPreference", self._adv_rdnss_preference),
+                    ("AdvRDNSSOpen", self._adv_rdnss_open),
+                    ("AdvRDNSSLifetime", self._adv_rdnss_lifetime),
+                ),
+            )
+        )
 
         return conf
diff --git a/src/antlion/controllers/ap_lib/radvd_constants.py b/src/antlion/controllers/ap_lib/radvd_constants.py
index 172a660..b02a694 100644
--- a/src/antlion/controllers/ap_lib/radvd_constants.py
+++ b/src/antlion/controllers/ap_lib/radvd_constants.py
@@ -14,53 +14,53 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-DEFAULT_PREFIX = 'fd00::/64'
+DEFAULT_PREFIX = "fd00::/64"
 
-IGNORE_IF_MISSING_ON = 'on'
-IGNORE_IF_MISSING_OFF = 'off'
+IGNORE_IF_MISSING_ON = "on"
+IGNORE_IF_MISSING_OFF = "off"
 
-ADV_SEND_ADVERT_ON = 'on'
-ADV_SEND_ADVERT_OFF = 'off'
+ADV_SEND_ADVERT_ON = "on"
+ADV_SEND_ADVERT_OFF = "off"
 
-UNICAST_ONLY_ON = 'on'
-UNICAST_ONLY_OFF = 'off'
+UNICAST_ONLY_ON = "on"
+UNICAST_ONLY_OFF = "off"
 
-ADV_MANAGED_FLAG_ON = 'on'
-ADV_MANAGED_FLAG_OFF = 'off'
+ADV_MANAGED_FLAG_ON = "on"
+ADV_MANAGED_FLAG_OFF = "off"
 
-ADV_OTHER_CONFIG_FLAG_ON = 'on'
-ADV_OTHER_CONFIG_FLAG_OFF = 'off'
+ADV_OTHER_CONFIG_FLAG_ON = "on"
+ADV_OTHER_CONFIG_FLAG_OFF = "off"
 
-ADV_DEFAULT_PREFERENCE_ON = 'on'
-ADV_DEFAULT_PREFERENCE_OFF = 'off'
+ADV_DEFAULT_PREFERENCE_ON = "on"
+ADV_DEFAULT_PREFERENCE_OFF = "off"
 
-ADV_SOURCE_LL_ADDRESS_ON = 'on'
-ADV_SOURCE_LL_ADDRESS_OFF = 'off'
+ADV_SOURCE_LL_ADDRESS_ON = "on"
+ADV_SOURCE_LL_ADDRESS_OFF = "off"
 
-ADV_HOME_AGENT_FLAG_ON = 'on'
-ADV_HOME_AGENT_FLAG_OFF = 'off'
+ADV_HOME_AGENT_FLAG_ON = "on"
+ADV_HOME_AGENT_FLAG_OFF = "off"
 
-ADV_HOME_AGENT_INFO_ON = 'on'
-ADV_HOME_AGENT_INFO_OFF = 'off'
+ADV_HOME_AGENT_INFO_ON = "on"
+ADV_HOME_AGENT_INFO_OFF = "off"
 
-ADV_MOB_RTR_SUPPORT_FLAG_ON = 'on'
-ADV_MOB_RTR_SUPPORT_FLAG_OFF = 'off'
+ADV_MOB_RTR_SUPPORT_FLAG_ON = "on"
+ADV_MOB_RTR_SUPPORT_FLAG_OFF = "off"
 
-ADV_INTERVAL_OPT_ON = 'on'
-ADV_INTERVAL_OPT_OFF = 'off'
+ADV_INTERVAL_OPT_ON = "on"
+ADV_INTERVAL_OPT_OFF = "off"
 
-ADV_ON_LINK_ON = 'on'
-ADV_ON_LINK_OFF = 'off'
+ADV_ON_LINK_ON = "on"
+ADV_ON_LINK_OFF = "off"
 
-ADV_AUTONOMOUS_ON = 'on'
-ADV_AUTONOMOUS_OFF = 'off'
+ADV_AUTONOMOUS_ON = "on"
+ADV_AUTONOMOUS_OFF = "off"
 
-ADV_ROUTER_ADDR_ON = 'on'
-ADV_ROUTER_ADDR_OFF = 'off'
+ADV_ROUTER_ADDR_ON = "on"
+ADV_ROUTER_ADDR_OFF = "off"
 
-ADV_ROUTE_PREFERENCE_LOW = 'low'
-ADV_ROUTE_PREFERENCE_MED = 'medium'
-ADV_ROUTE_PREFERENCE_HIGH = 'high'
+ADV_ROUTE_PREFERENCE_LOW = "low"
+ADV_ROUTE_PREFERENCE_MED = "medium"
+ADV_ROUTE_PREFERENCE_HIGH = "high"
 
-ADV_RDNSS_OPEN_ON = 'on'
-ADV_RDNSS_OPEN_OFF = 'off'
+ADV_RDNSS_OPEN_ON = "on"
+ADV_RDNSS_OPEN_OFF = "off"
diff --git a/src/antlion/controllers/ap_lib/third_party_ap_profiles/actiontec.py b/src/antlion/controllers/ap_lib/third_party_ap_profiles/actiontec.py
index 78931e9..9e48935 100644
--- a/src/antlion/controllers/ap_lib/third_party_ap_profiles/actiontec.py
+++ b/src/antlion/controllers/ap_lib/third_party_ap_profiles/actiontec.py
@@ -19,10 +19,7 @@
 from antlion.controllers.ap_lib import hostapd_utils
 
 
-def actiontec_pk5000(iface_wlan_2g=None,
-                     channel=None,
-                     security=None,
-                     ssid=None):
+def actiontec_pk5000(iface_wlan_2g=None, channel=None, security=None, ssid=None):
     """A simulated implementation of what a Actiontec PK5000 AP
     Args:
         iface_wlan_2g: The 2.4 interface of the test AP.
@@ -43,16 +40,15 @@
     if channel > 11:
         # Technically this should be 14 but since the PK5000 is a US only AP,
         # 11 is the highest allowable channel.
-        raise ValueError('The Actiontec PK5000 does not support 5Ghz. '
-                         'Invalid channel (%s)' % channel)
+        raise ValueError(
+            "The Actiontec PK5000 does not support 5Ghz. "
+            "Invalid channel (%s)" % channel
+        )
     # Verify interface and security
-    hostapd_utils.verify_interface(iface_wlan_2g,
-                                   hostapd_constants.INTERFACE_2G_LIST)
-    hostapd_utils.verify_security_mode(security,
-                                       [None, hostapd_constants.WPA2])
+    hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST)
+    hostapd_utils.verify_security_mode(security, [None, hostapd_constants.WPA2])
     if security:
-        hostapd_utils.verify_cipher(security,
-                                    [hostapd_constants.WPA2_DEFAULT_CIPER])
+        hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER])
 
     interface = iface_wlan_2g
     short_preamble = False
@@ -62,7 +58,8 @@
     # Sets the basic rates and supported rates of the PK5000
     additional_params = utils.merge_dicts(
         hostapd_constants.CCK_AND_OFDM_BASIC_RATES,
-        hostapd_constants.CCK_AND_OFDM_DATA_RATES)
+        hostapd_constants.CCK_AND_OFDM_DATA_RATES,
+    )
 
     config = hostapd_config.HostapdConfig(
         ssid=ssid,
@@ -75,15 +72,13 @@
         beacon_interval=beacon_interval,
         dtim_period=dtim_period,
         short_preamble=short_preamble,
-        additional_parameters=additional_params)
+        additional_parameters=additional_params,
+    )
 
     return config
 
 
-def actiontec_mi424wr(iface_wlan_2g=None,
-                      channel=None,
-                      security=None,
-                      ssid=None):
+def actiontec_mi424wr(iface_wlan_2g=None, channel=None, security=None, ssid=None):
     # TODO(b/143104825): Permit RIFS once it is supported
     """A simulated implementation of an Actiontec MI424WR AP.
     Args:
@@ -107,32 +102,32 @@
                 RIFS: Prohibited
     """
     if channel > 11:
-        raise ValueError('The Actiontec MI424WR does not support 5Ghz. '
-                         'Invalid channel (%s)' % channel)
+        raise ValueError(
+            "The Actiontec MI424WR does not support 5Ghz. "
+            "Invalid channel (%s)" % channel
+        )
     # Verify interface and security
-    hostapd_utils.verify_interface(iface_wlan_2g,
-                                   hostapd_constants.INTERFACE_2G_LIST)
-    hostapd_utils.verify_security_mode(security,
-                                       [None, hostapd_constants.WPA2])
+    hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST)
+    hostapd_utils.verify_security_mode(security, [None, hostapd_constants.WPA2])
     if security:
-        hostapd_utils.verify_cipher(security,
-                                    [hostapd_constants.WPA2_DEFAULT_CIPER])
+        hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER])
 
     n_capabilities = [
         hostapd_constants.N_CAPABILITY_TX_STBC,
         hostapd_constants.N_CAPABILITY_DSSS_CCK_40,
-        hostapd_constants.N_CAPABILITY_RX_STBC1
+        hostapd_constants.N_CAPABILITY_RX_STBC1,
     ]
-    rates = utils.merge_dicts(hostapd_constants.CCK_AND_OFDM_DATA_RATES,
-                              hostapd_constants.CCK_AND_OFDM_BASIC_RATES)
+    rates = utils.merge_dicts(
+        hostapd_constants.CCK_AND_OFDM_DATA_RATES,
+        hostapd_constants.CCK_AND_OFDM_BASIC_RATES,
+    )
     # Proprietary Atheros Communication: Adv Capability IE
     # Proprietary Atheros Communication: Unknown IE
     # Country Info: US Only IE
     vendor_elements = {
-        'vendor_elements':
-        'dd0900037f01010000ff7f'
-        'dd0a00037f04010000000000'
-        '0706555320010b1b'
+        "vendor_elements": "dd0900037f01010000ff7f"
+        "dd0a00037f04010000000000"
+        "0706555320010b1b"
     }
 
     additional_params = utils.merge_dicts(rates, vendor_elements)
@@ -149,6 +144,7 @@
         dtim_period=1,
         short_preamble=True,
         n_capabilities=n_capabilities,
-        additional_parameters=additional_params)
+        additional_parameters=additional_params,
+    )
 
     return config
diff --git a/src/antlion/controllers/ap_lib/third_party_ap_profiles/asus.py b/src/antlion/controllers/ap_lib/third_party_ap_profiles/asus.py
index 376d02c..ea25157 100644
--- a/src/antlion/controllers/ap_lib/third_party_ap_profiles/asus.py
+++ b/src/antlion/controllers/ap_lib/third_party_ap_profiles/asus.py
@@ -19,11 +19,9 @@
 from antlion.controllers.ap_lib import hostapd_utils
 
 
-def asus_rtac66u(iface_wlan_2g=None,
-                 iface_wlan_5g=None,
-                 channel=None,
-                 security=None,
-                 ssid=None):
+def asus_rtac66u(
+    iface_wlan_2g=None, iface_wlan_5g=None, channel=None, security=None, ssid=None
+):
     # TODO(b/143104825): Permit RIFS once it is supported
     # TODO(b/144446076): Address non-whirlwind hardware capabilities.
     """A simulated implementation of an Asus RTAC66U AP.
@@ -75,15 +73,11 @@
                 Simulated: RIFS Prohibited
     """
     # Verify interface and security
-    hostapd_utils.verify_interface(iface_wlan_2g,
-                                   hostapd_constants.INTERFACE_2G_LIST)
-    hostapd_utils.verify_interface(iface_wlan_5g,
-                                   hostapd_constants.INTERFACE_5G_LIST)
-    hostapd_utils.verify_security_mode(security,
-                                       [None, hostapd_constants.WPA2])
+    hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST)
+    hostapd_utils.verify_interface(iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST)
+    hostapd_utils.verify_security_mode(security, [None, hostapd_constants.WPA2])
     if security:
-        hostapd_utils.verify_cipher(security,
-                                    [hostapd_constants.WPA2_DEFAULT_CIPER])
+        hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER])
 
     # Common Parameters
     rates = hostapd_constants.CCK_AND_OFDM_DATA_RATES
@@ -94,15 +88,14 @@
         hostapd_constants.N_CAPABILITY_RX_STBC1,
         hostapd_constants.N_CAPABILITY_MAX_AMSDU_7935,
         hostapd_constants.N_CAPABILITY_DSSS_CCK_40,
-        hostapd_constants.N_CAPABILITY_SGI20
+        hostapd_constants.N_CAPABILITY_SGI20,
     ]
     # WPS IE
     # Broadcom IE
     vendor_elements = {
-        'vendor_elements':
-        'dd310050f204104a00011010440001021047001093689729d373c26cb1563c6c570f33'
-        'd7103c0001031049000600372a000120'
-        'dd090010180200001c0000'
+        "vendor_elements": "dd310050f204104a00011010440001021047001093689729d373c26cb1563c6c570f33"
+        "d7103c0001031049000600372a000120"
+        "dd090010180200001c0000"
     }
 
     # 2.4GHz
@@ -123,11 +116,12 @@
             hostapd_constants.AC_CAPABILITY_TX_STBC_2BY1,
             hostapd_constants.AC_CAPABILITY_RX_STBC_1,
             hostapd_constants.AC_CAPABILITY_MAX_MPDU_11454,
-            hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7
+            hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7,
         ]
 
-    additional_params = utils.merge_dicts(rates, vendor_elements,
-                                          hostapd_constants.UAPSD_ENABLED)
+    additional_params = utils.merge_dicts(
+        rates, vendor_elements, hostapd_constants.UAPSD_ENABLED
+    )
 
     config = hostapd_config.HostapdConfig(
         ssid=ssid,
@@ -143,16 +137,15 @@
         n_capabilities=n_capabilities,
         ac_capabilities=ac_capabilities,
         vht_channel_width=vht_channel_width,
-        additional_parameters=additional_params)
+        additional_parameters=additional_params,
+    )
 
     return config
 
 
-def asus_rtac86u(iface_wlan_2g=None,
-                 iface_wlan_5g=None,
-                 channel=None,
-                 security=None,
-                 ssid=None):
+def asus_rtac86u(
+    iface_wlan_2g=None, iface_wlan_5g=None, channel=None, security=None, ssid=None
+):
     """A simulated implementation of an Asus RTAC86U AP.
     Args:
         iface_wlan_2g: The 2.4Ghz interface of the test AP.
@@ -185,19 +178,15 @@
                     RSN PTKSA Replay Counter Capab: 1
     """
     # Verify interface and security
-    hostapd_utils.verify_interface(iface_wlan_2g,
-                                   hostapd_constants.INTERFACE_2G_LIST)
-    hostapd_utils.verify_interface(iface_wlan_5g,
-                                   hostapd_constants.INTERFACE_5G_LIST)
-    hostapd_utils.verify_security_mode(security,
-                                       [None, hostapd_constants.WPA2])
+    hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST)
+    hostapd_utils.verify_interface(iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST)
+    hostapd_utils.verify_security_mode(security, [None, hostapd_constants.WPA2])
     if security:
-        hostapd_utils.verify_cipher(security,
-                                    [hostapd_constants.WPA2_DEFAULT_CIPER])
+        hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER])
 
     # Common Parameters
     rates = hostapd_constants.CCK_AND_OFDM_DATA_RATES
-    qbss = {'bss_load_update_period': 50, 'chan_util_avg_period': 600}
+    qbss = {"bss_load_update_period": 50, "chan_util_avg_period": 600}
 
     # 2.4GHz
     if channel <= 11:
@@ -206,24 +195,23 @@
         rates.update(hostapd_constants.CCK_AND_OFDM_BASIC_RATES)
         spectrum_mgmt = False
         # Measurement Pilot Transmission IE
-        vendor_elements = {'vendor_elements': '42020000'}
+        vendor_elements = {"vendor_elements": "42020000"}
 
     # 5GHz
     else:
         interface = iface_wlan_5g
         mode = hostapd_constants.MODE_11A
         rates.update(hostapd_constants.OFDM_ONLY_BASIC_RATES)
-        spectrum_mgmt = True,
+        spectrum_mgmt = (True,)  # NOTE(review): pre-existing bug — trailing comma makes this a 1-tuple, not bool; truthy so behavior is unchanged, but likely meant True
         # Country Information IE (w/ individual channel info)
         # TPC Report Transmit Power IE
         # Measurement Pilot Transmission IE
         vendor_elements = {
-            'vendor_elements':
-            '074255532024011e28011e2c011e30011e34011e38011e3c011e40011e64011e'
-            '68011e6c011e70011e74011e84011e88011e8c011e95011e99011e9d011ea1011e'
-            'a5011e'
-            '23021300'
-            '42020000'
+            "vendor_elements": "074255532024011e28011e2c011e30011e34011e38011e3c011e40011e64011e"
+            "68011e6c011e70011e74011e84011e88011e8c011e95011e99011e9d011ea1011e"
+            "a5011e"
+            "23021300"
+            "42020000"
         }
 
     additional_params = utils.merge_dicts(rates, qbss, vendor_elements)
@@ -240,15 +228,14 @@
         dtim_period=3,
         short_preamble=False,
         spectrum_mgmt_required=spectrum_mgmt,
-        additional_parameters=additional_params)
+        additional_parameters=additional_params,
+    )
     return config
 
 
-def asus_rtac5300(iface_wlan_2g=None,
-                  iface_wlan_5g=None,
-                  channel=None,
-                  security=None,
-                  ssid=None):
+def asus_rtac5300(
+    iface_wlan_2g=None, iface_wlan_5g=None, channel=None, security=None, ssid=None
+):
     # TODO(b/143104825): Permit RIFS once it is supported
     # TODO(b/144446076): Address non-whirlwind hardware capabilities.
     """A simulated implementation of an Asus RTAC5300 AP.
@@ -297,29 +284,25 @@
                 Simulated: RIFS Prohibited
     """
     # Verify interface and security
-    hostapd_utils.verify_interface(iface_wlan_2g,
-                                   hostapd_constants.INTERFACE_2G_LIST)
-    hostapd_utils.verify_interface(iface_wlan_5g,
-                                   hostapd_constants.INTERFACE_5G_LIST)
-    hostapd_utils.verify_security_mode(security,
-                                       [None, hostapd_constants.WPA2])
+    hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST)
+    hostapd_utils.verify_interface(iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST)
+    hostapd_utils.verify_security_mode(security, [None, hostapd_constants.WPA2])
     if security:
-        hostapd_utils.verify_cipher(security,
-                                    [hostapd_constants.WPA2_DEFAULT_CIPER])
+        hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER])
 
     # Common Parameters
     rates = hostapd_constants.CCK_AND_OFDM_DATA_RATES
     vht_channel_width = 20
-    qbss = {'bss_load_update_period': 50, 'chan_util_avg_period': 600}
+    qbss = {"bss_load_update_period": 50, "chan_util_avg_period": 600}
     n_capabilities = [
         hostapd_constants.N_CAPABILITY_LDPC,
         hostapd_constants.N_CAPABILITY_TX_STBC,
         hostapd_constants.N_CAPABILITY_RX_STBC1,
-        hostapd_constants.N_CAPABILITY_SGI20
+        hostapd_constants.N_CAPABILITY_SGI20,
     ]
 
     # Broadcom IE
-    vendor_elements = {'vendor_elements': 'dd090010180200009c0000'}
+    vendor_elements = {"vendor_elements": "dd090010180200009c0000"}
 
     # 2.4GHz
     if channel <= 11:
@@ -328,9 +311,11 @@
         mode = hostapd_constants.MODE_11N_MIXED
         # AsusTek IE
         # Epigram 2.4GHz IE
-        vendor_elements['vendor_elements'] += 'dd25f832e4010101020100031411b5' \
-        '2fd437509c30b3d7f5cf5754fb125aed3b8507045aed3b85' \
-        'dd1e00904c0418bf0cb2798b0faaff0000aaff0000c0050001000000c3020002'
+        vendor_elements["vendor_elements"] += (
+            "dd25f832e4010101020100031411b5"
+            "2fd437509c30b3d7f5cf5754fb125aed3b8507045aed3b85"
+            "dd1e00904c0418bf0cb2798b0faaff0000aaff0000c0050001000000c3020002"
+        )
         ac_capabilities = None
 
     # 5GHz
@@ -339,18 +324,19 @@
         rates.update(hostapd_constants.OFDM_ONLY_BASIC_RATES)
         mode = hostapd_constants.MODE_11AC_MIXED
         # Epigram 5GHz IE
-        vendor_elements['vendor_elements'] += 'dd0500904c0410'
+        vendor_elements["vendor_elements"] += "dd0500904c0410"
         ac_capabilities = [
             hostapd_constants.AC_CAPABILITY_RXLDPC,
             hostapd_constants.AC_CAPABILITY_SHORT_GI_80,
             hostapd_constants.AC_CAPABILITY_TX_STBC_2BY1,
             hostapd_constants.AC_CAPABILITY_RX_STBC_1,
             hostapd_constants.AC_CAPABILITY_MAX_MPDU_11454,
-            hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7
+            hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7,
         ]
 
-    additional_params = utils.merge_dicts(rates, qbss, vendor_elements,
-                                          hostapd_constants.UAPSD_ENABLED)
+    additional_params = utils.merge_dicts(
+        rates, qbss, vendor_elements, hostapd_constants.UAPSD_ENABLED
+    )
 
     config = hostapd_config.HostapdConfig(
         ssid=ssid,
@@ -366,15 +352,14 @@
         n_capabilities=n_capabilities,
         ac_capabilities=ac_capabilities,
         vht_channel_width=vht_channel_width,
-        additional_parameters=additional_params)
+        additional_parameters=additional_params,
+    )
     return config
 
 
-def asus_rtn56u(iface_wlan_2g=None,
-                iface_wlan_5g=None,
-                channel=None,
-                security=None,
-                ssid=None):
+def asus_rtn56u(
+    iface_wlan_2g=None, iface_wlan_5g=None, channel=None, security=None, ssid=None
+):
     """A simulated implementation of an Asus RTN56U AP.
     Args:
         iface_wlan_2g: The 2.4Ghz interface of the test AP.
@@ -409,24 +394,20 @@
                     RSN PTKSA Replay Counter Capab: 16
     """
     # Verify interface and security
-    hostapd_utils.verify_interface(iface_wlan_2g,
-                                   hostapd_constants.INTERFACE_2G_LIST)
-    hostapd_utils.verify_interface(iface_wlan_5g,
-                                   hostapd_constants.INTERFACE_5G_LIST)
-    hostapd_utils.verify_security_mode(security,
-                                       [None, hostapd_constants.WPA2])
+    hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST)
+    hostapd_utils.verify_interface(iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST)
+    hostapd_utils.verify_security_mode(security, [None, hostapd_constants.WPA2])
     if security:
-        hostapd_utils.verify_cipher(security,
-                                    [hostapd_constants.WPA2_DEFAULT_CIPER])
+        hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER])
 
     # Common Parameters
     rates = hostapd_constants.CCK_AND_OFDM_DATA_RATES
-    qbss = {'bss_load_update_period': 50, 'chan_util_avg_period': 600}
+    qbss = {"bss_load_update_period": 50, "chan_util_avg_period": 600}
     n_capabilities = [
         hostapd_constants.N_CAPABILITY_SGI20,
         hostapd_constants.N_CAPABILITY_SGI40,
         hostapd_constants.N_CAPABILITY_TX_STBC,
-        hostapd_constants.N_CAPABILITY_RX_STBC1
+        hostapd_constants.N_CAPABILITY_RX_STBC1,
     ]
 
     # 2.4GHz
@@ -438,13 +419,12 @@
         # AP Channel Report IEs (2)
         # WPS IE
         vendor_elements = {
-            'vendor_elements':
-            'dd07000c4307000000'
-            '0706555320010b14'
-            '33082001020304050607'
-            '33082105060708090a0b'
-            'dd270050f204104a000110104400010210470010bc329e001dd811b286011c872c'
-            'd33448103c000101'
+            "vendor_elements": "dd07000c4307000000"
+            "0706555320010b14"
+            "33082001020304050607"
+            "33082105060708090a0b"
+            "dd270050f204104a000110104400010210470010bc329e001dd811b286011c872c"
+            "d33448103c000101"
         }
 
     # 5GHz
@@ -453,13 +433,11 @@
         rates.update(hostapd_constants.OFDM_ONLY_BASIC_RATES)
         # Ralink Technology IE
         # US Country Code IE
-        vendor_elements = {
-            'vendor_elements': 'dd07000c4307000000'
-            '0706555320010b14'
-        }
+        vendor_elements = {"vendor_elements": "dd07000c4307000000" "0706555320010b14"}
 
-    additional_params = utils.merge_dicts(rates, vendor_elements, qbss,
-                                          hostapd_constants.UAPSD_ENABLED)
+    additional_params = utils.merge_dicts(
+        rates, vendor_elements, qbss, hostapd_constants.UAPSD_ENABLED
+    )
 
     config = hostapd_config.HostapdConfig(
         ssid=ssid,
@@ -473,16 +451,15 @@
         dtim_period=1,
         short_preamble=False,
         n_capabilities=n_capabilities,
-        additional_parameters=additional_params)
+        additional_parameters=additional_params,
+    )
 
     return config
 
 
-def asus_rtn66u(iface_wlan_2g=None,
-                iface_wlan_5g=None,
-                channel=None,
-                security=None,
-                ssid=None):
+def asus_rtn66u(
+    iface_wlan_2g=None, iface_wlan_5g=None, channel=None, security=None, ssid=None
+):
     # TODO(b/143104825): Permit RIFS once it is supported
     """A simulated implementation of an Asus RTN66U AP.
     Args:
@@ -516,15 +493,11 @@
                     Simulated: MPDU Density 8
     """
     # Verify interface and security
-    hostapd_utils.verify_interface(iface_wlan_2g,
-                                   hostapd_constants.INTERFACE_2G_LIST)
-    hostapd_utils.verify_interface(iface_wlan_5g,
-                                   hostapd_constants.INTERFACE_5G_LIST)
-    hostapd_utils.verify_security_mode(security,
-                                       [None, hostapd_constants.WPA2])
+    hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST)
+    hostapd_utils.verify_interface(iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST)
+    hostapd_utils.verify_security_mode(security, [None, hostapd_constants.WPA2])
     if security:
-        hostapd_utils.verify_cipher(security,
-                                    [hostapd_constants.WPA2_DEFAULT_CIPER])
+        hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER])
 
     # Common Parameters
     rates = hostapd_constants.CCK_AND_OFDM_DATA_RATES
@@ -533,10 +506,10 @@
         hostapd_constants.N_CAPABILITY_SGI20,
         hostapd_constants.N_CAPABILITY_TX_STBC,
         hostapd_constants.N_CAPABILITY_RX_STBC1,
-        hostapd_constants.N_CAPABILITY_MAX_AMSDU_7935
+        hostapd_constants.N_CAPABILITY_MAX_AMSDU_7935,
     ]
     # Broadcom IE
-    vendor_elements = {'vendor_elements': 'dd090010180200001c0000'}
+    vendor_elements = {"vendor_elements": "dd090010180200001c0000"}
 
     # 2.4GHz
     if channel <= 11:
@@ -549,8 +522,9 @@
         interface = iface_wlan_5g
         rates.update(hostapd_constants.OFDM_ONLY_BASIC_RATES)
 
-    additional_params = utils.merge_dicts(rates, vendor_elements,
-                                          hostapd_constants.UAPSD_ENABLED)
+    additional_params = utils.merge_dicts(
+        rates, vendor_elements, hostapd_constants.UAPSD_ENABLED
+    )
 
     config = hostapd_config.HostapdConfig(
         ssid=ssid,
@@ -564,6 +538,7 @@
         dtim_period=3,
         short_preamble=False,
         n_capabilities=n_capabilities,
-        additional_parameters=additional_params)
+        additional_parameters=additional_params,
+    )
 
     return config
diff --git a/src/antlion/controllers/ap_lib/third_party_ap_profiles/belkin.py b/src/antlion/controllers/ap_lib/third_party_ap_profiles/belkin.py
index 957e214..9c5c99d 100644
--- a/src/antlion/controllers/ap_lib/third_party_ap_profiles/belkin.py
+++ b/src/antlion/controllers/ap_lib/third_party_ap_profiles/belkin.py
@@ -19,10 +19,7 @@
 from antlion.controllers.ap_lib import hostapd_utils
 
 
-def belkin_f9k1001v5(iface_wlan_2g=None,
-                     channel=None,
-                     security=None,
-                     ssid=None):
+def belkin_f9k1001v5(iface_wlan_2g=None, channel=None, security=None, ssid=None):
     # TODO(b/143104825): Permit RIFS once it is supported
     """A simulated implementation of what a Belkin F9K1001v5 AP
     Args:
@@ -52,35 +49,34 @@
                 RSN PTKSA Replay Counter Capab: 16
     """
     if channel > 11:
-        raise ValueError('The Belkin F9k1001v5 does not support 5Ghz. '
-                         'Invalid channel (%s)' % channel)
+        raise ValueError(
+            "The Belkin F9k1001v5 does not support 5Ghz. "
+            "Invalid channel (%s)" % channel
+        )
     # Verify interface and security
-    hostapd_utils.verify_interface(iface_wlan_2g,
-                                   hostapd_constants.INTERFACE_2G_LIST)
-    hostapd_utils.verify_security_mode(security,
-                                       [None, hostapd_constants.WPA2])
+    hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST)
+    hostapd_utils.verify_security_mode(security, [None, hostapd_constants.WPA2])
     if security:
-        hostapd_utils.verify_cipher(security,
-                                    [hostapd_constants.WPA2_DEFAULT_CIPER])
+        hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER])
 
     n_capabilities = [
         hostapd_constants.N_CAPABILITY_SGI20,
         hostapd_constants.N_CAPABILITY_SGI40,
         hostapd_constants.N_CAPABILITY_TX_STBC,
         hostapd_constants.N_CAPABILITY_MAX_AMSDU_7935,
-        hostapd_constants.N_CAPABILITY_DSSS_CCK_40
+        hostapd_constants.N_CAPABILITY_DSSS_CCK_40,
     ]
 
     rates = additional_params = utils.merge_dicts(
         hostapd_constants.CCK_AND_OFDM_BASIC_RATES,
-        hostapd_constants.CCK_AND_OFDM_DATA_RATES)
+        hostapd_constants.CCK_AND_OFDM_DATA_RATES,
+    )
 
     # Broadcom IE
     # WPS IE
     vendor_elements = {
-        'vendor_elements':
-        'dd090010180200100c0000'
-        'dd180050f204104a00011010440001021049000600372a000120'
+        "vendor_elements": "dd090010180200100c0000"
+        "dd180050f204104a00011010440001021049000600372a000120"
     }
 
     additional_params = utils.merge_dicts(rates, vendor_elements)
@@ -97,6 +93,7 @@
         dtim_period=3,
         short_preamble=False,
         n_capabilities=n_capabilities,
-        additional_parameters=additional_params)
+        additional_parameters=additional_params,
+    )
 
     return config
diff --git a/src/antlion/controllers/ap_lib/third_party_ap_profiles/linksys.py b/src/antlion/controllers/ap_lib/third_party_ap_profiles/linksys.py
index 64d76f6..8010837 100644
--- a/src/antlion/controllers/ap_lib/third_party_ap_profiles/linksys.py
+++ b/src/antlion/controllers/ap_lib/third_party_ap_profiles/linksys.py
@@ -19,11 +19,9 @@
 from antlion.controllers.ap_lib import hostapd_utils
 
 
-def linksys_ea4500(iface_wlan_2g=None,
-                   iface_wlan_5g=None,
-                   channel=None,
-                   security=None,
-                   ssid=None):
+def linksys_ea4500(
+    iface_wlan_2g=None, iface_wlan_5g=None, channel=None, security=None, ssid=None
+):
     # TODO(b/143104825): Permit RIFS once it is supported
     # TODO(b/144446076): Address non-whirlwind hardware capabilities.
     """A simulated implementation of what a Linksys EA4500 AP
@@ -53,15 +51,11 @@
                 RSN PTKSA Replay Counter Capab: 16
     """
     # Verify interface and security
-    hostapd_utils.verify_interface(iface_wlan_2g,
-                                   hostapd_constants.INTERFACE_2G_LIST)
-    hostapd_utils.verify_interface(iface_wlan_5g,
-                                   hostapd_constants.INTERFACE_5G_LIST)
-    hostapd_utils.verify_security_mode(security,
-                                       [None, hostapd_constants.WPA2])
+    hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST)
+    hostapd_utils.verify_interface(iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST)
+    hostapd_utils.verify_security_mode(security, [None, hostapd_constants.WPA2])
     if security:
-        hostapd_utils.verify_cipher(security,
-                                    [hostapd_constants.WPA2_DEFAULT_CIPER])
+        hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER])
 
     # Common Parameters
     rates = hostapd_constants.CCK_AND_OFDM_DATA_RATES
@@ -71,17 +65,16 @@
         hostapd_constants.N_CAPABILITY_SGI40,
         hostapd_constants.N_CAPABILITY_TX_STBC,
         hostapd_constants.N_CAPABILITY_RX_STBC1,
-        hostapd_constants.N_CAPABILITY_DSSS_CCK_40
+        hostapd_constants.N_CAPABILITY_DSSS_CCK_40,
     ]
 
     # Epigram HT Capabilities IE
     # Epigram HT Additional Capabilities IE
     # Marvell Semiconductor, Inc. IE
     vendor_elements = {
-        'vendor_elements':
-        'dd1e00904c33fc0117ffffff0000000000000000000000000000000000000000'
-        'dd1a00904c3424000000000000000000000000000000000000000000'
-        'dd06005043030000'
+        "vendor_elements": "dd1e00904c33fc0117ffffff0000000000000000000000000000000000000000"
+        "dd1a00904c3424000000000000000000000000000000000000000000"
+        "dd06005043030000"
     }
 
     # 2.4GHz
@@ -97,8 +90,9 @@
         rates.update(hostapd_constants.OFDM_ONLY_BASIC_RATES)
         obss_interval = None
 
-    additional_params = utils.merge_dicts(rates, vendor_elements,
-                                          hostapd_constants.UAPSD_ENABLED)
+    additional_params = utils.merge_dicts(
+        rates, vendor_elements, hostapd_constants.UAPSD_ENABLED
+    )
 
     config = hostapd_config.HostapdConfig(
         ssid=ssid,
@@ -113,16 +107,15 @@
         short_preamble=True,
         obss_interval=obss_interval,
         n_capabilities=n_capabilities,
-        additional_parameters=additional_params)
+        additional_parameters=additional_params,
+    )
 
     return config
 
 
-def linksys_ea9500(iface_wlan_2g=None,
-                   iface_wlan_5g=None,
-                   channel=None,
-                   security=None,
-                   ssid=None):
+def linksys_ea9500(
+    iface_wlan_2g=None, iface_wlan_5g=None, channel=None, security=None, ssid=None
+):
     """A simulated implementation of what a Linksys EA9500 AP
     Args:
         iface_wlan_2g: The 2.4Ghz interface of the test AP.
@@ -148,21 +141,17 @@
                 RSN PTKSA Replay Counter Capab: 1
     """
     # Verify interface and security
-    hostapd_utils.verify_interface(iface_wlan_2g,
-                                   hostapd_constants.INTERFACE_2G_LIST)
-    hostapd_utils.verify_interface(iface_wlan_5g,
-                                   hostapd_constants.INTERFACE_5G_LIST)
-    hostapd_utils.verify_security_mode(security,
-                                       [None, hostapd_constants.WPA2])
+    hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST)
+    hostapd_utils.verify_interface(iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST)
+    hostapd_utils.verify_security_mode(security, [None, hostapd_constants.WPA2])
     if security:
-        hostapd_utils.verify_cipher(security,
-                                    [hostapd_constants.WPA2_DEFAULT_CIPER])
+        hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER])
 
     # Common Parameters
     rates = hostapd_constants.CCK_AND_OFDM_DATA_RATES
-    qbss = {'bss_load_update_period': 50, 'chan_util_avg_period': 600}
+    qbss = {"bss_load_update_period": 50, "chan_util_avg_period": 600}
     # Measurement Pilot Transmission IE
-    vendor_elements = {'vendor_elements': '42020000'}
+    vendor_elements = {"vendor_elements": "42020000"}
 
     # 2.4GHz
     if channel <= 11:
@@ -189,15 +178,14 @@
         beacon_interval=100,
         dtim_period=1,
         short_preamble=False,
-        additional_parameters=additional_params)
+        additional_parameters=additional_params,
+    )
     return config
 
 
-def linksys_wrt1900acv2(iface_wlan_2g=None,
-                        iface_wlan_5g=None,
-                        channel=None,
-                        security=None,
-                        ssid=None):
+def linksys_wrt1900acv2(
+    iface_wlan_2g=None, iface_wlan_5g=None, channel=None, security=None, ssid=None
+):
     # TODO(b/144446076): Address non-whirlwind hardware capabilities.
     """A simulated implementation of what a Linksys WRT1900ACV2 AP
     Args:
@@ -233,22 +221,18 @@
                     RSN PTKSA Replay Counter Capab: 16
     """
     # Verify interface and security
-    hostapd_utils.verify_interface(iface_wlan_2g,
-                                   hostapd_constants.INTERFACE_2G_LIST)
-    hostapd_utils.verify_interface(iface_wlan_5g,
-                                   hostapd_constants.INTERFACE_5G_LIST)
-    hostapd_utils.verify_security_mode(security,
-                                       [None, hostapd_constants.WPA2])
+    hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST)
+    hostapd_utils.verify_interface(iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST)
+    hostapd_utils.verify_security_mode(security, [None, hostapd_constants.WPA2])
     if security:
-        hostapd_utils.verify_cipher(security,
-                                    [hostapd_constants.WPA2_DEFAULT_CIPER])
+        hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER])
 
     # Common Parameters
     rates = hostapd_constants.CCK_AND_OFDM_DATA_RATES
     n_capabilities = [
         hostapd_constants.N_CAPABILITY_LDPC,
         hostapd_constants.N_CAPABILITY_SGI20,
-        hostapd_constants.N_CAPABILITY_SGI40
+        hostapd_constants.N_CAPABILITY_SGI40,
     ]
     ac_capabilities = [
         hostapd_constants.AC_CAPABILITY_RXLDPC,
@@ -256,17 +240,16 @@
         hostapd_constants.AC_CAPABILITY_RX_STBC_1,
         hostapd_constants.AC_CAPABILITY_RX_ANTENNA_PATTERN,
         hostapd_constants.AC_CAPABILITY_TX_ANTENNA_PATTERN,
-        hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7
+        hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7,
     ]
     vht_channel_width = 20
     # Epigram, Inc. HT Capabilities IE
     # Epigram, Inc. HT Additional Capabilities IE
     # Marvell Semiconductor IE
     vendor_elements = {
-        'vendor_elements':
-        'dd1e00904c336c0017ffffff0001000000000000000000000000001fff071800'
-        'dd1a00904c3424000000000000000000000000000000000000000000'
-        'dd06005043030000'
+        "vendor_elements": "dd1e00904c336c0017ffffff0001000000000000000000000000001fff071800"
+        "dd1a00904c3424000000000000000000000000000000000000000000"
+        "dd06005043030000"
     }
 
     # 2.4GHz
@@ -282,15 +265,16 @@
         interface = iface_wlan_5g
         rates.update(hostapd_constants.OFDM_ONLY_BASIC_RATES)
         obss_interval = None
-        spectrum_mgmt = True,
-        local_pwr_constraint = {'local_pwr_constraint': 3}
+        spectrum_mgmt = (True,)
+        local_pwr_constraint = {"local_pwr_constraint": 3}
         # Country Information IE (w/ individual channel info)
-        vendor_elements['vendor_elements'] += '071e5553202401112801112c011130' \
-            '01119501179901179d0117a10117a50117'
+        vendor_elements["vendor_elements"] += (
+            "071e5553202401112801112c011130" "01119501179901179d0117a10117a50117"
+        )
 
-    additional_params = utils.merge_dicts(rates, vendor_elements,
-                                          hostapd_constants.UAPSD_ENABLED,
-                                          local_pwr_constraint)
+    additional_params = utils.merge_dicts(
+        rates, vendor_elements, hostapd_constants.UAPSD_ENABLED, local_pwr_constraint
+    )
 
     config = hostapd_config.HostapdConfig(
         ssid=ssid,
@@ -308,5 +292,6 @@
         ac_capabilities=ac_capabilities,
         vht_channel_width=vht_channel_width,
         spectrum_mgmt_required=spectrum_mgmt,
-        additional_parameters=additional_params)
+        additional_parameters=additional_params,
+    )
     return config
diff --git a/src/antlion/controllers/ap_lib/third_party_ap_profiles/netgear.py b/src/antlion/controllers/ap_lib/third_party_ap_profiles/netgear.py
index cf9bc93..25a91cd 100644
--- a/src/antlion/controllers/ap_lib/third_party_ap_profiles/netgear.py
+++ b/src/antlion/controllers/ap_lib/third_party_ap_profiles/netgear.py
@@ -19,11 +19,9 @@
 from antlion.controllers.ap_lib import hostapd_utils
 
 
-def netgear_r7000(iface_wlan_2g=None,
-                  iface_wlan_5g=None,
-                  channel=None,
-                  security=None,
-                  ssid=None):
+def netgear_r7000(
+    iface_wlan_2g=None, iface_wlan_5g=None, channel=None, security=None, ssid=None
+):
     # TODO(b/143104825): Permit RIFS once it is supported
     # TODO(b/144446076): Address non-whirlwind hardware capabilities.
     """A simulated implementation of what a Netgear R7000 AP
@@ -79,15 +77,11 @@
                     AP Channel Report Capability: Disabled
     """
     # Verify interface and security
-    hostapd_utils.verify_interface(iface_wlan_2g,
-                                   hostapd_constants.INTERFACE_2G_LIST)
-    hostapd_utils.verify_interface(iface_wlan_5g,
-                                   hostapd_constants.INTERFACE_5G_LIST)
-    hostapd_utils.verify_security_mode(security,
-                                       [None, hostapd_constants.WPA2])
+    hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST)
+    hostapd_utils.verify_interface(iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST)
+    hostapd_utils.verify_security_mode(security, [None, hostapd_constants.WPA2])
     if security:
-        hostapd_utils.verify_cipher(security,
-                                    [hostapd_constants.WPA2_DEFAULT_CIPER])
+        hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER])
 
     # Common Parameters
     rates = hostapd_constants.CCK_AND_OFDM_DATA_RATES
@@ -104,14 +98,13 @@
     # Epigram, Inc. IE
     # Broadcom IE
     vendor_elements = {
-        'vendor_elements':
-        'dd0600146c000000'
-        'dd310050f204104a00011010440001021047001066189606f1e967f9c0102048817a7'
-        '69e103c0001031049000600372a000120'
-        'dd1e00904c0408bf0cb259820feaff0000eaff0000c0050001000000c3020002'
-        'dd090010180200001c0000'
+        "vendor_elements": "dd0600146c000000"
+        "dd310050f204104a00011010440001021047001066189606f1e967f9c0102048817a7"
+        "69e103c0001031049000600372a000120"
+        "dd1e00904c0408bf0cb259820feaff0000eaff0000c0050001000000c3020002"
+        "dd090010180200001c0000"
     }
-    qbss = {'bss_load_update_period': 50, 'chan_util_avg_period': 600}
+    qbss = {"bss_load_update_period": 50, "chan_util_avg_period": 600}
 
     # 2.4GHz
     if channel <= 11:
@@ -142,14 +135,17 @@
             hostapd_constants.AC_CAPABILITY_TX_STBC_2BY1,
             hostapd_constants.AC_CAPABILITY_RX_STBC_1,
             hostapd_constants.AC_CAPABILITY_MAX_MPDU_11454,
-            hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7
+            hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7,
         ]
 
     additional_params = utils.merge_dicts(
-        rates, vendor_elements, qbss,
+        rates,
+        vendor_elements,
+        qbss,
         hostapd_constants.ENABLE_RRM_BEACON_REPORT,
         hostapd_constants.ENABLE_RRM_NEIGHBOR_REPORT,
-        hostapd_constants.UAPSD_ENABLED)
+        hostapd_constants.UAPSD_ENABLED,
+    )
 
     config = hostapd_config.HostapdConfig(
         ssid=ssid,
@@ -166,15 +162,14 @@
         n_capabilities=n_capabilities,
         ac_capabilities=ac_capabilities,
         vht_channel_width=vht_channel_width,
-        additional_parameters=additional_params)
+        additional_parameters=additional_params,
+    )
     return config
 
 
-def netgear_wndr3400(iface_wlan_2g=None,
-                     iface_wlan_5g=None,
-                     channel=None,
-                     security=None,
-                     ssid=None):
+def netgear_wndr3400(
+    iface_wlan_2g=None, iface_wlan_5g=None, channel=None, security=None, ssid=None
+):
     # TODO(b/143104825): Permit RIFS on 5GHz once it is supported
     # TODO(b/144446076): Address non-whirlwind hardware capabilities.
     """A simulated implementation of what a Netgear WNDR3400 AP
@@ -209,15 +204,11 @@
                     Simulated: Green Field not supported on Whirlwind.
     """
     # Verify interface and security
-    hostapd_utils.verify_interface(iface_wlan_2g,
-                                   hostapd_constants.INTERFACE_2G_LIST)
-    hostapd_utils.verify_interface(iface_wlan_5g,
-                                   hostapd_constants.INTERFACE_5G_LIST)
-    hostapd_utils.verify_security_mode(security,
-                                       [None, hostapd_constants.WPA2])
+    hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST)
+    hostapd_utils.verify_interface(iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST)
+    hostapd_utils.verify_security_mode(security, [None, hostapd_constants.WPA2])
     if security:
-        hostapd_utils.verify_cipher(security,
-                                    [hostapd_constants.WPA2_DEFAULT_CIPER])
+        hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER])
 
     # Common Parameters
     rates = hostapd_constants.CCK_AND_OFDM_DATA_RATES
@@ -226,15 +217,14 @@
         hostapd_constants.N_CAPABILITY_SGI40,
         hostapd_constants.N_CAPABILITY_TX_STBC,
         hostapd_constants.N_CAPABILITY_MAX_AMSDU_7935,
-        hostapd_constants.N_CAPABILITY_DSSS_CCK_40
+        hostapd_constants.N_CAPABILITY_DSSS_CCK_40,
     ]
     # WPS IE
     # Broadcom IE
     vendor_elements = {
-        'vendor_elements':
-        'dd310050f204104a0001101044000102104700108c403eb883e7e225ab139828703ade'
-        'dc103c0001031049000600372a000120'
-        'dd090010180200f0040000'
+        "vendor_elements": "dd310050f204104a0001101044000102104700108c403eb883e7e225ab139828703ade"
+        "dc103c0001031049000600372a000120"
+        "dd090010180200f0040000"
     }
 
     # 2.4GHz
@@ -251,8 +241,9 @@
         obss_interval = None
         n_capabilities.append(hostapd_constants.N_CAPABILITY_HT40_PLUS)
 
-    additional_params = utils.merge_dicts(rates, vendor_elements,
-                                          hostapd_constants.UAPSD_ENABLED)
+    additional_params = utils.merge_dicts(
+        rates, vendor_elements, hostapd_constants.UAPSD_ENABLED
+    )
 
     config = hostapd_config.HostapdConfig(
         ssid=ssid,
@@ -267,6 +258,7 @@
         short_preamble=False,
         obss_interval=obss_interval,
         n_capabilities=n_capabilities,
-        additional_parameters=additional_params)
+        additional_parameters=additional_params,
+    )
 
     return config
diff --git a/src/antlion/controllers/ap_lib/third_party_ap_profiles/securifi.py b/src/antlion/controllers/ap_lib/third_party_ap_profiles/securifi.py
index b552b28..4a5bf68 100644
--- a/src/antlion/controllers/ap_lib/third_party_ap_profiles/securifi.py
+++ b/src/antlion/controllers/ap_lib/third_party_ap_profiles/securifi.py
@@ -19,8 +19,7 @@
 from antlion.controllers.ap_lib import hostapd_utils
 
 
-def securifi_almond(iface_wlan_2g=None, channel=None, security=None,
-                    ssid=None):
+def securifi_almond(iface_wlan_2g=None, channel=None, security=None, ssid=None):
     """A simulated implementation of a Securifi Almond AP
     Args:
         iface_wlan_2g: The 2.4Ghz interface of the test AP.
@@ -48,16 +47,15 @@
                     RSN PTKSA Replay Counter Capab: 16
     """
     if channel > 11:
-        raise ValueError('The Securifi Almond does not support 5Ghz. '
-                         'Invalid channel (%s)' % channel)
+        raise ValueError(
+            "The Securifi Almond does not support 5Ghz. "
+            "Invalid channel (%s)" % channel
+        )
     # Verify interface and security
-    hostapd_utils.verify_interface(iface_wlan_2g,
-                                   hostapd_constants.INTERFACE_2G_LIST)
-    hostapd_utils.verify_security_mode(security,
-                                       [None, hostapd_constants.WPA2])
+    hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST)
+    hostapd_utils.verify_security_mode(security, [None, hostapd_constants.WPA2])
     if security:
-        hostapd_utils.verify_cipher(security,
-                                    [hostapd_constants.WPA2_DEFAULT_CIPER])
+        hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER])
 
     n_capabilities = [
         hostapd_constants.N_CAPABILITY_HT40_PLUS,
@@ -65,24 +63,25 @@
         hostapd_constants.N_CAPABILITY_SGI40,
         hostapd_constants.N_CAPABILITY_TX_STBC,
         hostapd_constants.N_CAPABILITY_RX_STBC1,
-        hostapd_constants.N_CAPABILITY_DSSS_CCK_40
+        hostapd_constants.N_CAPABILITY_DSSS_CCK_40,
     ]
 
-    rates = utils.merge_dicts(hostapd_constants.CCK_AND_OFDM_BASIC_RATES,
-                              hostapd_constants.CCK_AND_OFDM_DATA_RATES)
+    rates = utils.merge_dicts(
+        hostapd_constants.CCK_AND_OFDM_BASIC_RATES,
+        hostapd_constants.CCK_AND_OFDM_DATA_RATES,
+    )
 
     # Ralink Technology IE
     # Country Information IE
     # AP Channel Report IEs
     vendor_elements = {
-        'vendor_elements':
-        'dd07000c4307000000'
-        '0706555320010b14'
-        '33082001020304050607'
-        '33082105060708090a0b'
+        "vendor_elements": "dd07000c4307000000"
+        "0706555320010b14"
+        "33082001020304050607"
+        "33082105060708090a0b"
     }
 
-    qbss = {'bss_load_update_period': 50, 'chan_util_avg_period': 600}
+    qbss = {"bss_load_update_period": 50, "chan_util_avg_period": 600}
 
     additional_params = utils.merge_dicts(rates, vendor_elements, qbss)
 
@@ -99,6 +98,7 @@
         short_preamble=True,
         obss_interval=300,
         n_capabilities=n_capabilities,
-        additional_parameters=additional_params)
+        additional_parameters=additional_params,
+    )
 
     return config
diff --git a/src/antlion/controllers/ap_lib/third_party_ap_profiles/tplink.py b/src/antlion/controllers/ap_lib/third_party_ap_profiles/tplink.py
index 8911e3e..81eeeec 100644
--- a/src/antlion/controllers/ap_lib/third_party_ap_profiles/tplink.py
+++ b/src/antlion/controllers/ap_lib/third_party_ap_profiles/tplink.py
@@ -19,11 +19,9 @@
 from antlion.controllers.ap_lib import hostapd_utils
 
 
-def tplink_archerc5(iface_wlan_2g=None,
-                    iface_wlan_5g=None,
-                    channel=None,
-                    security=None,
-                    ssid=None):
+def tplink_archerc5(
+    iface_wlan_2g=None, iface_wlan_5g=None, channel=None, security=None, ssid=None
+):
     # TODO(b/144446076): Address non-whirlwind hardware capabilities.
     """A simulated implementation of an TPLink ArcherC5 AP.
     Args:
@@ -73,15 +71,11 @@
                 Simulated: RIFS Prohibited
     """
     # Verify interface and security
-    hostapd_utils.verify_interface(iface_wlan_2g,
-                                   hostapd_constants.INTERFACE_2G_LIST)
-    hostapd_utils.verify_interface(iface_wlan_5g,
-                                   hostapd_constants.INTERFACE_5G_LIST)
-    hostapd_utils.verify_security_mode(security,
-                                       [None, hostapd_constants.WPA2])
+    hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST)
+    hostapd_utils.verify_interface(iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST)
+    hostapd_utils.verify_security_mode(security, [None, hostapd_constants.WPA2])
     if security:
-        hostapd_utils.verify_cipher(security,
-                                    [hostapd_constants.WPA2_DEFAULT_CIPER])
+        hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER])
 
     # Common Parameters
     rates = hostapd_constants.CCK_AND_OFDM_DATA_RATES
@@ -90,17 +84,16 @@
         hostapd_constants.N_CAPABILITY_SGI20,
         hostapd_constants.N_CAPABILITY_TX_STBC,
         hostapd_constants.N_CAPABILITY_RX_STBC1,
-        hostapd_constants.N_CAPABILITY_MAX_AMSDU_7935
+        hostapd_constants.N_CAPABILITY_MAX_AMSDU_7935,
     ]
     # WPS IE
     # Broadcom IE
     vendor_elements = {
-        'vendor_elements':
-        'dd310050f204104a000110104400010210470010d96c7efc2f8938f1efbd6e5148bfa8'
-        '12103c0001031049000600372a000120'
-        'dd090010180200001c0000'
+        "vendor_elements": "dd310050f204104a000110104400010210470010d96c7efc2f8938f1efbd6e5148bfa8"
+        "12103c0001031049000600372a000120"
+        "dd090010180200001c0000"
     }
-    qbss = {'bss_load_update_period': 50, 'chan_util_avg_period': 600}
+    qbss = {"bss_load_update_period": 50, "chan_util_avg_period": 600}
 
     # 2.4GHz
     if channel <= 11:
@@ -128,10 +121,13 @@
         ]
 
     additional_params = utils.merge_dicts(
-        rates, vendor_elements, qbss,
+        rates,
+        vendor_elements,
+        qbss,
         hostapd_constants.ENABLE_RRM_BEACON_REPORT,
         hostapd_constants.ENABLE_RRM_NEIGHBOR_REPORT,
-        hostapd_constants.UAPSD_ENABLED)
+        hostapd_constants.UAPSD_ENABLED,
+    )
 
     config = hostapd_config.HostapdConfig(
         ssid=ssid,
@@ -147,15 +143,14 @@
         n_capabilities=n_capabilities,
         ac_capabilities=ac_capabilities,
         vht_channel_width=vht_channel_width,
-        additional_parameters=additional_params)
+        additional_parameters=additional_params,
+    )
     return config
 
 
-def tplink_archerc7(iface_wlan_2g=None,
-                    iface_wlan_5g=None,
-                    channel=None,
-                    security=None,
-                    ssid=None):
+def tplink_archerc7(
+    iface_wlan_2g=None, iface_wlan_5g=None, channel=None, security=None, ssid=None
+):
     # TODO(b/143104825): Permit RIFS once it is supported
     """A simulated implementation of an TPLink ArcherC7 AP.
     Args:
@@ -183,15 +178,11 @@
                     RSN PTKSA Replay Counter Capab: 16
     """
     # Verify interface and security
-    hostapd_utils.verify_interface(iface_wlan_2g,
-                                   hostapd_constants.INTERFACE_2G_LIST)
-    hostapd_utils.verify_interface(iface_wlan_5g,
-                                   hostapd_constants.INTERFACE_5G_LIST)
-    hostapd_utils.verify_security_mode(security,
-                                       [None, hostapd_constants.WPA2])
+    hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST)
+    hostapd_utils.verify_interface(iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST)
+    hostapd_utils.verify_security_mode(security, [None, hostapd_constants.WPA2])
     if security:
-        hostapd_utils.verify_cipher(security,
-                                    [hostapd_constants.WPA2_DEFAULT_CIPER])
+        hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER])
 
     # Common Parameters
     rates = hostapd_constants.CCK_AND_OFDM_DATA_RATES
@@ -200,14 +191,13 @@
         hostapd_constants.N_CAPABILITY_LDPC,
         hostapd_constants.N_CAPABILITY_SGI20,
         hostapd_constants.N_CAPABILITY_TX_STBC,
-        hostapd_constants.N_CAPABILITY_RX_STBC1
+        hostapd_constants.N_CAPABILITY_RX_STBC1,
     ]
     # Atheros IE
     # WPS IE
     vendor_elements = {
-        'vendor_elements':
-        'dd0900037f01010000ff7f'
-        'dd180050f204104a00011010440001021049000600372a000120'
+        "vendor_elements": "dd0900037f01010000ff7f"
+        "dd180050f204104a00011010440001021049000600372a000120"
     }
 
     # 2.4GHz
@@ -229,14 +219,15 @@
         mode = hostapd_constants.MODE_11AC_MIXED
         spectrum_mgmt = True
         # Country Information IE (w/ individual channel info)
-        vendor_elements['vendor_elements'] += (
-            '074255532024011e28011e2c011e30'
-            '011e3401173801173c01174001176401176801176c0117700117740117840117'
-            '8801178c011795011e99011e9d011ea1011ea5011e')
-        pwr_constraint = {'local_pwr_constraint': 3}
+        vendor_elements["vendor_elements"] += (
+            "074255532024011e28011e2c011e30"
+            "011e3401173801173c01174001176401176801176c0117700117740117840117"
+            "8801178c011795011e99011e9d011ea1011ea5011e"
+        )
+        pwr_constraint = {"local_pwr_constraint": 3}
         n_capabilities += [
             hostapd_constants.N_CAPABILITY_SGI40,
-            hostapd_constants.N_CAPABILITY_MAX_AMSDU_7935
+            hostapd_constants.N_CAPABILITY_MAX_AMSDU_7935,
         ]
 
         if hostapd_config.ht40_plus_allowed(channel):
@@ -252,12 +243,12 @@
             hostapd_constants.AC_CAPABILITY_RX_STBC_1,
             hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7,
             hostapd_constants.AC_CAPABILITY_RX_ANTENNA_PATTERN,
-            hostapd_constants.AC_CAPABILITY_TX_ANTENNA_PATTERN
+            hostapd_constants.AC_CAPABILITY_TX_ANTENNA_PATTERN,
         ]
 
-    additional_params = utils.merge_dicts(rates, vendor_elements,
-                                          hostapd_constants.UAPSD_ENABLED,
-                                          pwr_constraint)
+    additional_params = utils.merge_dicts(
+        rates, vendor_elements, hostapd_constants.UAPSD_ENABLED, pwr_constraint
+    )
 
     config = hostapd_config.HostapdConfig(
         ssid=ssid,
@@ -274,15 +265,14 @@
         ac_capabilities=ac_capabilities,
         vht_channel_width=vht_channel_width,
         spectrum_mgmt_required=spectrum_mgmt,
-        additional_parameters=additional_params)
+        additional_parameters=additional_params,
+    )
     return config
 
 
-def tplink_c1200(iface_wlan_2g=None,
-                 iface_wlan_5g=None,
-                 channel=None,
-                 security=None,
-                 ssid=None):
+def tplink_c1200(
+    iface_wlan_2g=None, iface_wlan_5g=None, channel=None, security=None, ssid=None
+):
     # TODO(b/143104825): Permit RIFS once it is supported
     # TODO(b/144446076): Address non-whirlwind hardware capabilities.
     """A simulated implementation of an TPLink C1200 AP.
@@ -320,15 +310,11 @@
                 Simulated: RIFS Prohibited
     """
     # Verify interface and security
-    hostapd_utils.verify_interface(iface_wlan_2g,
-                                   hostapd_constants.INTERFACE_2G_LIST)
-    hostapd_utils.verify_interface(iface_wlan_5g,
-                                   hostapd_constants.INTERFACE_5G_LIST)
-    hostapd_utils.verify_security_mode(security,
-                                       [None, hostapd_constants.WPA2])
+    hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST)
+    hostapd_utils.verify_interface(iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST)
+    hostapd_utils.verify_security_mode(security, [None, hostapd_constants.WPA2])
     if security:
-        hostapd_utils.verify_cipher(security,
-                                    [hostapd_constants.WPA2_DEFAULT_CIPER])
+        hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER])
 
     # Common Parameters
     rates = hostapd_constants.CCK_AND_OFDM_DATA_RATES
@@ -337,15 +323,14 @@
         hostapd_constants.N_CAPABILITY_SGI20,
         hostapd_constants.N_CAPABILITY_TX_STBC,
         hostapd_constants.N_CAPABILITY_RX_STBC1,
-        hostapd_constants.N_CAPABILITY_MAX_AMSDU_7935
+        hostapd_constants.N_CAPABILITY_MAX_AMSDU_7935,
     ]
     # WPS IE
     # Broadcom IE
     vendor_elements = {
-        'vendor_elements':
-        'dd350050f204104a000110104400010210470010000000000000000000000000000000'
-        '00103c0001031049000a00372a00012005022688'
-        'dd090010180200000c0000'
+        "vendor_elements": "dd350050f204104a000110104400010210470010000000000000000000000000000000"
+        "00103c0001031049000a00372a00012005022688"
+        "dd090010180200000c0000"
     }
 
     # 2.4GHz
@@ -373,9 +358,12 @@
         ]
 
     additional_params = utils.merge_dicts(
-        rates, vendor_elements, hostapd_constants.ENABLE_RRM_BEACON_REPORT,
+        rates,
+        vendor_elements,
+        hostapd_constants.ENABLE_RRM_BEACON_REPORT,
         hostapd_constants.ENABLE_RRM_NEIGHBOR_REPORT,
-        hostapd_constants.UAPSD_ENABLED)
+        hostapd_constants.UAPSD_ENABLED,
+    )
 
     config = hostapd_config.HostapdConfig(
         ssid=ssid,
@@ -391,14 +379,12 @@
         n_capabilities=n_capabilities,
         ac_capabilities=ac_capabilities,
         vht_channel_width=vht_channel_width,
-        additional_parameters=additional_params)
+        additional_parameters=additional_params,
+    )
     return config
 
 
-def tplink_tlwr940n(iface_wlan_2g=None,
-                    channel=None,
-                    security=None,
-                    ssid=None):
+def tplink_tlwr940n(iface_wlan_2g=None, channel=None, security=None, ssid=None):
     # TODO(b/143104825): Permit RIFS once it is supported
     """A simulated implementation of an TPLink TLWR940N AP.
     Args:
@@ -419,37 +405,38 @@
                 RSN PTKSA Replay Counter Capab: 16
     """
     if channel > 11:
-        raise ValueError('The mock TP-Link TLWR940N does not support 5Ghz. '
-                         'Invalid channel (%s)' % channel)
+        raise ValueError(
+            "The mock TP-Link TLWR940N does not support 5Ghz. "
+            "Invalid channel (%s)" % channel
+        )
     # Verify interface and security
-    hostapd_utils.verify_interface(iface_wlan_2g,
-                                   hostapd_constants.INTERFACE_2G_LIST)
-    hostapd_utils.verify_security_mode(security,
-                                       [None, hostapd_constants.WPA2])
+    hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST)
+    hostapd_utils.verify_security_mode(security, [None, hostapd_constants.WPA2])
     if security:
-        hostapd_utils.verify_cipher(security,
-                                    [hostapd_constants.WPA2_DEFAULT_CIPER])
+        hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER])
 
     n_capabilities = [
         hostapd_constants.N_CAPABILITY_SGI20,
         hostapd_constants.N_CAPABILITY_TX_STBC,
-        hostapd_constants.N_CAPABILITY_RX_STBC1
+        hostapd_constants.N_CAPABILITY_RX_STBC1,
     ]
 
-    rates = utils.merge_dicts(hostapd_constants.CCK_AND_OFDM_BASIC_RATES,
-                              hostapd_constants.CCK_AND_OFDM_DATA_RATES)
+    rates = utils.merge_dicts(
+        hostapd_constants.CCK_AND_OFDM_BASIC_RATES,
+        hostapd_constants.CCK_AND_OFDM_DATA_RATES,
+    )
 
     # Atheros Communications, Inc. IE
     # WPS IE
     vendor_elements = {
-        'vendor_elements':
-        'dd0900037f01010000ff7f'
-        'dd260050f204104a0001101044000102104900140024e2600200010160000002000160'
-        '0100020001'
+        "vendor_elements": "dd0900037f01010000ff7f"
+        "dd260050f204104a0001101044000102104900140024e2600200010160000002000160"
+        "0100020001"
     }
 
-    additional_params = utils.merge_dicts(rates, vendor_elements,
-                                          hostapd_constants.UAPSD_ENABLED)
+    additional_params = utils.merge_dicts(
+        rates, vendor_elements, hostapd_constants.UAPSD_ENABLED
+    )
 
     config = hostapd_config.HostapdConfig(
         ssid=ssid,
@@ -463,6 +450,7 @@
         dtim_period=1,
         short_preamble=True,
         n_capabilities=n_capabilities,
-        additional_parameters=additional_params)
+        additional_parameters=additional_params,
+    )
 
     return config
diff --git a/src/antlion/controllers/ap_lib/wireless_network_management.py b/src/antlion/controllers/ap_lib/wireless_network_management.py
index ecd5b3b..62ba34e 100644
--- a/src/antlion/controllers/ap_lib/wireless_network_management.py
+++ b/src/antlion/controllers/ap_lib/wireless_network_management.py
@@ -18,8 +18,9 @@
 
 from antlion.controllers.ap_lib.radio_measurement import NeighborReportElement
 
-BssTransitionCandidateList = NewType('BssTransitionCandidateList',
-                                     List[NeighborReportElement])
+BssTransitionCandidateList = NewType(
+    "BssTransitionCandidateList", List[NeighborReportElement]
+)
 
 
 class BssTerminationDuration:
@@ -50,16 +51,17 @@
     """
 
     def __init__(
-            self,
-            preferred_candidate_list_included: bool = False,
-            abridged: bool = False,
-            disassociation_imminent: bool = False,
-            ess_disassociation_imminent: bool = False,
-            disassociation_timer: int = 0,
-            validity_interval: int = 1,
-            bss_termination_duration: Optional[BssTerminationDuration] = None,
-            session_information_url: Optional[str] = None,
-            candidate_list: Optional[BssTransitionCandidateList] = None):
+        self,
+        preferred_candidate_list_included: bool = False,
+        abridged: bool = False,
+        disassociation_imminent: bool = False,
+        ess_disassociation_imminent: bool = False,
+        disassociation_timer: int = 0,
+        validity_interval: int = 1,
+        bss_termination_duration: Optional[BssTerminationDuration] = None,
+        session_information_url: Optional[str] = None,
+        candidate_list: Optional[BssTransitionCandidateList] = None,
+    ):
         """Create a BSS Transition Management request.
 
         Args:
diff --git a/src/antlion/controllers/arduino_wifi_dongle.py b/src/antlion/controllers/arduino_wifi_dongle.py
deleted file mode 100644
index 18f57e9..0000000
--- a/src/antlion/controllers/arduino_wifi_dongle.py
+++ /dev/null
@@ -1,390 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-import os
-import re
-import subprocess
-import threading
-import time
-from datetime import datetime
-
-from serial import Serial
-
-from antlion import logger
-from antlion import signals
-from antlion import utils
-
-MOBLY_CONTROLLER_CONFIG_NAME = 'ArduinoWifiDongle'
-ACTS_CONTROLLER_REFERENCE_NAME = 'arduino_wifi_dongles'
-
-WIFI_DONGLE_EMPTY_CONFIG_MSG = 'Configuration is empty, abort!'
-WIFI_DONGLE_NOT_LIST_CONFIG_MSG = 'Configuration should be a list, abort!'
-
-DEV = '/dev/'
-IP = 'IP: '
-STATUS = 'STATUS: '
-SSID = 'SSID: '
-RSSI = 'RSSI: '
-PING = 'PING: '
-SCAN_BEGIN = 'Scan Begin'
-SCAN_END = 'Scan End'
-READ_TIMEOUT = 10
-BAUD_RATE = 9600
-TMP_DIR = 'tmp/'
-SSID_KEY = 'SSID'
-PWD_KEY = 'password'
-
-
-class ArduinoWifiDongleError(signals.ControllerError):
-    pass
-
-
-def create(configs):
-    """Creates ArduinoWifiDongle objects.
-
-    Args:
-        configs: A list of dicts or a list of serial numbers, each representing
-                 a configuration of a arduino wifi dongle.
-
-    Returns:
-        A list of Wifi dongle objects.
-    """
-    if not configs:
-        raise ArduinoWifiDongleError(WIFI_DONGLE_EMPTY_CONFIG_MSG)
-    elif not isinstance(configs, list):
-        raise ArduinoWifiDongleError(WIFI_DONGLE_NOT_LIST_CONFIG_MSG)
-    elif isinstance(configs[0], str):
-        # Configs is a list of serials.
-        return get_instances(configs)
-    else:
-        # Configs is a list of dicts.
-        return get_instances_with_configs(configs)
-
-
-def destroy(wcs):
-    for wc in wcs:
-        wc.clean_up()
-
-
-def get_instances(configs):
-    wcs = []
-    for s in configs:
-        wcs.append(ArduinoWifiDongle(s))
-    return wcs
-
-
-def get_instances_with_configs(configs):
-    wcs = []
-    for c in configs:
-        try:
-            s = c.pop('serial')
-        except KeyError:
-            raise ArduinoWifiDongleError(
-                '"serial" is missing for ArduinoWifiDongle config %s.' % c)
-        wcs.append(ArduinoWifiDongle(s))
-    return wcs
-
-
-class ArduinoWifiDongle(object):
-    """Class representing an arduino wifi dongle.
-
-    Each object of this class represents one wifi dongle in ACTS.
-
-    Attribtues:
-        serial: Short serial number of the wifi dongle in string.
-        port: The terminal port the dongle is connected to in string.
-        log: A logger adapted from root logger with added token specific to an
-             ArduinoWifiDongle instance.
-        log_file_fd: File handle of the log file.
-        set_logging: Logging for the dongle is enabled when this param is set
-        lock: Lock to acquire and release set_logging variable
-        ssid: SSID of the wifi network the dongle is connected to.
-        ip_addr: IP address on the wifi interface.
-        scan_results: Most recent scan results.
-        ping: Ping status in bool - ping to www.google.com
-    """
-
-    def __init__(self, serial):
-        """Initializes the ArduinoWifiDongle object.
-
-        Args:
-            serial: The serial number for the wifi dongle.
-        """
-        if not serial:
-            raise ArduinoWifiDongleError(
-                'The ArduinoWifiDongle serial number must not be empty.')
-        self.serial = serial
-        self.port = self._get_serial_port()
-        self.log = logger.create_tagged_trace_logger(
-            'ArduinoWifiDongle|%s' % self.serial)
-        log_path_base = getattr(logging, 'log_path', '/tmp/logs')
-        self.log_file_path = os.path.join(
-            log_path_base, 'ArduinoWifiDongle_%s_serial_log.txt' % self.serial)
-        self.log_file_fd = open(self.log_file_path, 'a')
-
-        self.set_logging = True
-        self.lock = threading.Lock()
-        self.start_controller_log()
-
-        self.ssid = None
-        self.ip_addr = None
-        self.status = 0
-        self.scan_results = []
-        self.scanning = False
-        self.ping = False
-
-        os.makedirs(TMP_DIR, exist_ok=True)
-
-    def clean_up(self):
-        """Cleans up the controller and releases any resources it claimed."""
-        self.stop_controller_log()
-        self.log_file_fd.close()
-
-    def _get_serial_port(self):
-        """Get the serial port for a given ArduinoWifiDongle serial number.
-
-        Returns:
-            Serial port in string if the dongle is attached.
-        """
-        cmd = 'ls %s' % DEV
-        serial_ports = utils.exe_cmd(cmd).decode('utf-8', 'ignore').split('\n')
-        for port in serial_ports:
-            if 'USB' not in port:
-                continue
-            tty_port = '%s%s' % (DEV, port)
-            cmd = 'udevadm info %s' % tty_port
-            udev_output = utils.exe_cmd(cmd).decode('utf-8', 'ignore')
-            result = re.search('ID_SERIAL_SHORT=(.*)\n', udev_output)
-            if self.serial == result.group(1):
-                logging.info('Found wifi dongle %s at serial port %s' %
-                             (self.serial, tty_port))
-                return tty_port
-        raise ArduinoWifiDongleError('Wifi dongle %s is specified in config'
-                                     ' but is not attached.' % self.serial)
-
-    def write(self, arduino, file_path, network=None):
-        """Write an ino file to the arduino wifi dongle.
-
-        Args:
-            arduino: path of the arduino executable.
-            file_path: path of the ino file to flash onto the dongle.
-            network: wifi network to connect to.
-
-        Returns:
-            True: if the write is sucessful.
-            False: if not.
-        """
-        return_result = True
-        self.stop_controller_log('Flashing %s\n' % file_path)
-        cmd = arduino + file_path + ' --upload --port ' + self.port
-        if network:
-            cmd = self._update_ino_wifi_network(arduino, file_path, network)
-        self.log.info('Command is %s' % cmd)
-        proc = subprocess.Popen(cmd,
-                                stdout=subprocess.PIPE, stderr=subprocess.PIPE,
-                                shell=True)
-        _, _ = proc.communicate()
-        return_code = proc.returncode
-        if return_code != 0:
-            self.log.error('Failed to write file %s' % return_code)
-            return_result = False
-        self.start_controller_log('Flashing complete\n')
-        return return_result
-
-    def _update_ino_wifi_network(self, arduino, file_path, network):
-        """Update wifi network in the ino file.
-
-        Args:
-            arduino: path of the arduino executable.
-            file_path: path of the ino file to flash onto the dongle
-            network: wifi network to update the ino file with
-
-        Returns:
-            cmd: arduino command to run to flash the .ino file
-        """
-        tmp_file = '%s%s' % (TMP_DIR, file_path.split('/')[-1])
-        utils.exe_cmd('cp %s %s' % (file_path, tmp_file))
-        ssid = network[SSID_KEY]
-        pwd = network[PWD_KEY]
-        sed_cmd = 'sed -i \'s/"wifi_tethering_test"/"%s"/\' %s' % (
-            ssid, tmp_file)
-        utils.exe_cmd(sed_cmd)
-        sed_cmd = 'sed -i  \'s/"password"/"%s"/\' %s' % (pwd, tmp_file)
-        utils.exe_cmd(sed_cmd)
-        cmd = "%s %s --upload --port %s" % (arduino, tmp_file, self.port)
-        return cmd
-
-    def start_controller_log(self, msg=None):
-        """Reads the serial port and writes the data to ACTS log file.
-
-        This method depends on the logging enabled in the .ino files. The logs
-        are read from the serial port and are written to the ACTS log after
-        adding a timestamp to the data.
-
-        Args:
-            msg: Optional param to write to the log file.
-        """
-        if msg:
-            curr_time = str(datetime.now())
-            self.log_file_fd.write(curr_time + ' INFO: ' + msg)
-        t = threading.Thread(target=self._start_log)
-        t.daemon = True
-        t.start()
-
-    def stop_controller_log(self, msg=None):
-        """Stop the controller log.
-
-        Args:
-            msg: Optional param to write to the log file.
-        """
-        with self.lock:
-            self.set_logging = False
-        if msg:
-            curr_time = str(datetime.now())
-            self.log_file_fd.write(curr_time + ' INFO: ' + msg)
-
-    def _start_log(self):
-        """Target method called by start_controller_log().
-
-        This method is called as a daemon thread, which continuously reads the
-        serial port. Stops when set_logging is set to False or when the test
-        ends.
-        """
-        self.set_logging = True
-        ser = Serial(self.port, BAUD_RATE)
-        while True:
-            curr_time = str(datetime.now())
-            data = ser.readline().decode('utf-8', 'ignore')
-            self._set_vars(data)
-            with self.lock:
-                if not self.set_logging:
-                    break
-            self.log_file_fd.write(curr_time + " " + data)
-
-    def _set_vars(self, data):
-        """Sets the variables by reading from the serial port.
-
-        Wifi dongle data such as wifi status, ip address, scan results
-        are read from the serial port and saved inside the class.
-
-        Args:
-            data: New line from the serial port.
-        """
-        # 'data' represents each line retrieved from the device's serial port.
-        # since we depend on the serial port logs to get the attributes of the
-        # dongle, every line has the format of {ino_file: method: param: value}.
-        # We look for the attribute in the log and retrieve its value.
-        # Ex: data = "connect_wifi: loop(): STATUS: 3" then val = "3"
-        # Similarly, we check when the scan has begun and ended and get all the
-        # scan results in between.
-        if data.count(':') != 3:
-            return
-        val = data.split(':')[-1].lstrip().rstrip()
-        if SCAN_BEGIN in data:
-            self.scan_results = []
-            self.scanning = True
-        elif SCAN_END in data:
-            self.scanning = False
-        elif self.scanning:
-            self.scan_results.append(data)
-        elif IP in data:
-            self.ip_addr = None if val == '0.0.0.0' else val
-        elif SSID in data:
-            self.ssid = val
-        elif STATUS in data:
-            self.status = int(val)
-        elif PING in data:
-            self.ping = int(val) != 0
-
-    def ip_address(self, exp_result=True, timeout=READ_TIMEOUT):
-        """Get the ip address of the wifi dongle.
-
-        Args:
-            exp_result: True if IP address is expected (wifi connected).
-            timeout: Optional param that specifies the wait time for the IP
-                     address to come up on the dongle.
-
-        Returns:
-            IP: addr in string, if wifi connected.
-                None if not connected.
-        """
-        curr_time = time.time()
-        while time.time() < curr_time + timeout:
-            if (exp_result and self.ip_addr) or (
-                    not exp_result and not self.ip_addr):
-                break
-            time.sleep(1)
-        return self.ip_addr
-
-    def wifi_status(self, exp_result=True, timeout=READ_TIMEOUT):
-        """Get wifi status on the dongle.
-
-        Returns:
-            True: if wifi is connected.
-            False: if not connected.
-        """
-        curr_time = time.time()
-        while time.time() < curr_time + timeout:
-            if (exp_result and self.status == 3) or (
-                    not exp_result and not self.status):
-                break
-            time.sleep(1)
-        return self.status == 3
-
-    def wifi_scan(self, exp_result=True, timeout=READ_TIMEOUT):
-        """Get the wifi scan results.
-
-        Args:
-            exp_result: True if scan results are expected.
-            timeout: Optional param that specifies the wait time for the scan
-                     results to come up on the dongle.
-
-        Returns:
-            list of dictionaries each with SSID and RSSI of the network
-            found in the scan.
-        """
-        scan_networks = []
-        d = {}
-        curr_time = time.time()
-        while time.time() < curr_time + timeout:
-            if (exp_result and self.scan_results) or (
-                    not exp_result and not self.scan_results):
-                break
-            time.sleep(1)
-        for i in range(len(self.scan_results)):
-            if SSID in self.scan_results[i]:
-                d.clear()
-                d[SSID] = self.scan_results[i].split(':')[-1].rstrip()
-            elif RSSI in self.scan_results[i]:
-                d[RSSI] = self.scan_results[i].split(':')[-1].rstrip()
-                scan_networks.append(d)
-
-        return scan_networks
-
-    def ping_status(self, exp_result=True, timeout=READ_TIMEOUT):
-        """ Get ping status on the dongle.
-
-        Returns:
-            True: if ping is successful
-            False: if not successful
-        """
-        curr_time = time.time()
-        while time.time() < curr_time + timeout:
-            if (exp_result and self.ping) or (not exp_result and not self.ping):
-                break
-            time.sleep(1)
-        return self.ping
diff --git a/src/antlion/controllers/asus_axe11000_ap.py b/src/antlion/controllers/asus_axe11000_ap.py
deleted file mode 100644
index d4372ac..0000000
--- a/src/antlion/controllers/asus_axe11000_ap.py
+++ /dev/null
@@ -1,763 +0,0 @@
-"""Controller for Asus AXE11000 access point."""
-
-import time
-from antlion import logger
-from selenium import webdriver
-from selenium.common.exceptions import NoSuchElementException
-from selenium.webdriver.chrome.options import Options
-from selenium.webdriver.support.ui import Select
-
-MOBLY_CONTROLLER_CONFIG_NAME = "AsusAXE11000AP"
-ACTS_CONTROLLER_REFERENCE_NAME = "access_points"
-
-# Access point UI parameters
-USERNAME = "login_username"
-PASSWORD = "login_passwd"
-SIGN_IN_ID = "button"
-APPLY_BUTTON = "apply_btn"
-APPLY_BUTTON_ID = "applyButton"
-WIRELESS_SETTINGS = "Advanced_Wireless_Content_menu"
-GENERAL_TAB = "Advanced_Wireless_Content_tab"
-PROFESSIONAL_TAB = "Advanced_WAdvanced_Content_tab"
-HE_MODE_ID = "he_mode_field"
-WL_UNIT = "wl_unit"
-WL_11AX = "wl_11ax"
-WL_RADIO = "wl_radio"
-WL_CLOSED = "wl_closed"
-RADIO = "radio"
-BAND_2G_CHANNEL = "band0_channel"
-BAND_5G_CHANNEL = "band1_channel"
-BAND_6G_CHANNEL = "band2_channel"
-BAND_2G_AUTH = "band0_auth_mode_x"
-BAND_5G_AUTH = "band1_auth_mode_x"
-BAND_6G_AUTH = "band2_auth_mode_x"
-BAND_2G_SSID = "band0_ssid"
-BAND_5G_SSID = "band1_ssid"
-BAND_6G_SSID = "band2_ssid"
-BAND_2G_PSK = "band0_wpa_psk"
-BAND_5G_PSK = "band1_wpa_psk"
-BAND_6G_PSK = "band2_wpa_psk"
-BAND_2G_RAD_IP = "band0_radius_ipaddr"
-BAND_5G_RAD_IP = "band1_radius_ipaddr"
-BAND_2G_RAD_PORT = "band0_radius_port"
-BAND_5G_RAD_PORT = "band1_radius_port"
-BAND_2G_RAD_KEY = "band0_radius_key"
-BAND_5G_RAD_KEY = "band1_radius_key"
-SMART_CONNECT = "smartcon_enable_field"
-BROWSER_WAIT_SHORT_TIMEOUT = 6
-BROWSER_WAIT_TIMEOUT = 15
-BROWSER_WAIT_LONG_TIMEOUT = 90
-BROWSER_WAIT_VERY_LONG_TIMEOUT = 180
-
-# Access point supported modes, channels
-VALID_BANDS = ["2g", "5g", "6g"]
-WL_BAND_VALUE = {"2g": "0", "5g": "1", "6g": "2"}
-CHANNELS_2G = {
-    0: "0",
-    1: "1",
-    2: "2",
-    3: "3",
-    4: "4",
-    5: "5",
-    6: "6",
-    7: "7",
-    8: "8",
-    9: "9",
-    10: "10",
-    11: "11"
-}
-CHANNELS_5G = {
-    0: "0",
-    36: "36/160",
-    40: "40/160",
-    44: "44/160",
-    48: "48/160",
-    52: "52/160",
-    56: "56/160",
-    60: "60/160",
-    64: "64/160",
-    100: "100/160",
-    104: "104/160",
-    108: "108/160",
-    112: "112/160",
-    116: "116/160",
-    120: "120/160",
-    124: "124/160",
-    128: "128/160",
-    132: "132/80",
-    136: "136/80",
-    140: "140/80",
-    144: "144/80",
-    149: "149/80",
-    153: "153/80",
-    157: "157/80",
-    161: "161/80",
-    165: "165"
-}
-CHANNELS_6G = {
-    0: "0",
-    37: "6g37/160",
-    53: "6g53/160",
-    69: "6g69/160",
-    85: "6g85/160",
-    101: "6g101/160",
-    117: "6g117/160",
-    133: "6g133/160",
-    149: "6g149/160",
-    165: "6g165/160",
-    181: "6g181/160",
-    197: "6g197/160",
-    213: "6g213/160"
-}
-
-
-def create(configs):
-  """Creates ap controllers from a json config."""
-  return [AsusAXE11000AP(c) for c in configs]
-
-
-def destroy(aps):
-  """Destroys a list of ap controllers."""
-  for ap in aps:
-    ap.reset_to_default_ap_settings()
-    ap.driver.quit()
-
-
-class AsusAXE11000AP(object):
-  """Asus AXE11000 AccessPoint controller.
-
-  Controller class for Asus AXE11000 6GHz AP. This class provides methods to
-  configure the AP with different settings required for 11ax and 6GHz testing.
-  The controller uses chrome webdriver to communicate with the AP.
-
-  The controller object is initiated in the test class. The ACTS test runner
-  calls this controller using the 'AsusAXE11000AP' keyword in the ACTS config
-  file. The AP is reset to default settings and this is handled during the
-  test teardown.
-
-  Attributes:
-    ip: IP address to reach the AP.
-    port: Port numnber to reach the AP.
-    protocol: Protcol to reach the AP (http/https).
-    username: Username to login to the AP.
-    password: Password to login to the AP.
-    config_page: web url to login to the AP.
-    ap_settings: AP settings configured at any given point.
-    default_ap_settings: Default AP settings before running the tests.
-    driver: chrome webdriver object to update the settings.
-  """
-
-  def __init__(self, config):
-    """Initialize AP.
-
-    Creates a chrome webdriver object based on the router parameters.
-    The webdriver will login to the router and goes to the wireless settings
-    page. This object will be used to change the router settings required for
-    the test cases. Required parameters are <ip_address>, <port>, <protocol>,
-    <admin_username> and <admin_password>.
-
-    Url: <procotol>://<ip_address>:<port>/Main_Login.asp
-    Login: <admin_username>/<admin_password>
-
-    Args:
-      config: dict, dictionary of router parameters required for webdriver.
-    """
-    self.ip = config["ip_address"]
-    self.port = config["port"]
-    self.protocol = config["protocol"]
-    self.username = config["admin_username"]
-    self.password = config["admin_password"]
-    lambda_msg = lambda msg: "[AsusAXE11000AP|%s] %s" % (self.ip, msg)
-    self.log = logger.create_logger(lambda_msg)
-    self.ap_settings = {"2g": {}, "5g": {}, "6g": {},}
-    self.config_page = (
-        "{protocol}://{ip_address}:{port}/Main_Login.asp").format(
-            protocol=self.protocol, ip_address=self.ip, port=self.port)
-    self.chrome_options = Options()
-    self.chrome_options.add_argument("--headless")
-    self.chrome_options.add_argument("--no-sandbox")
-    self.driver = webdriver.Chrome(options=self.chrome_options)
-    self.driver.implicitly_wait(BROWSER_WAIT_TIMEOUT*2)
-    self.driver.get(self.config_page)
-    self.driver.find_element_by_name(USERNAME).send_keys(self.username)
-    self.driver.find_element_by_name(PASSWORD).send_keys(self.password)
-    self.driver.find_element_by_id(SIGN_IN_ID).click()
-    self._wait_for_web_element(self.driver.find_element_by_id,
-                               WIRELESS_SETTINGS)
-    self.driver.find_element_by_id(WIRELESS_SETTINGS).click()
-    self._wait_for_web_element(self.driver.find_element_by_id, SMART_CONNECT)
-    self._update_ap_settings()
-    self.default_ap_settings = self.ap_settings.copy()
-
-  ### Helper methods ###
-
-  def _wait_for_web_element(self,
-                            find_element,
-                            element,
-                            attribute=None,
-                            value=None):
-    """Verifies click actions/selections work.
-
-    Args:
-      find_element: func(), webdriver method to call
-      element: str, web element to look for. Ex: id, class, name
-      attribute: str, attribute to get from a webelement
-      value: str, verify attribute is set to the correct value
-
-    Raises:
-      ValueError: An error occurred if expected attribute not found.
-    """
-    curr_time = time.time()
-    while time.time() < curr_time + BROWSER_WAIT_TIMEOUT*4:
-      time.sleep(2)
-      try:
-        x = find_element(element)
-        if attribute and str(value) not in x.get_attribute(attribute):
-          raise ValueError("Attribute is not set to the right value")
-        return
-      except NoSuchElementException:
-        pass
-    raise ValueError("Failed to find web element: %s" % element)
-
-  def _update_ap_settings_2g_band(self):
-    """Read settings configured on 2g band.
-
-    Parameters Updated:
-      security type: open, wpa2-psk, wpa3-sae or wpa2-ent.
-      ssid: SSID of the wifi network.
-      password: password of the wifi network (if psk or sae network).
-      radius server ip: Radius server IP addr (if ent network).
-      radius server port: Radius server Port number (if ent network).
-      radius server secret: Radius server secret (if ent network).
-      channel: 2G band channel.
-    """
-    dict_2g = {}
-    dict_2g["security"] = self.driver.find_element_by_name(
-        BAND_2G_AUTH).get_attribute("value")
-    dict_2g["SSID"] = self.driver.find_element_by_name(
-        BAND_2G_SSID).get_attribute("value")
-    if dict_2g["security"] == "psk2" or dict_2g["security"] == "sae":
-      dict_2g["password"] = self.driver.find_element_by_name(
-          BAND_2G_PSK).get_attribute("value")
-    elif dict_2g["security"] == "wpa2":
-      dict_2g["radius_ip_addr"] = self.driver.find_element_by_name(
-          BAND_2G_RAD_IP).get_attribute("value")
-      dict_2g["radius_port"] = self.driver.find_element_by_name(
-          BAND_2G_RAD_PORT).get_attribute("value")
-      dict_2g["radius_secret"] = self.driver.find_element_by_name(
-          BAND_2G_RAD_KEY).get_attribute("value")
-    channel_field = self._get_webdriver_elements_for_channels("2g")
-    ch_val = self.driver.find_element_by_name(channel_field).get_attribute(
-        "value")
-    channel = 0
-    for key, val in CHANNELS_2G.items():
-      if val == ch_val:
-        channel = key
-        break
-    self.ap_settings["2g"] = dict_2g.copy()
-    self.ap_settings["2g"]["channel"] = channel
-
-  def _update_ap_settings_5g_band(self):
-    """Read settings configured on 5g band.
-
-    Parameters Updated:
-      security type: open, wpa2-psk, wpa3-sae or wpa2-ent.
-      ssid: SSID of the wifi network.
-      password: password of the wifi network (if psk or sae network).
-      radius server ip: Radius server IP addr (if ent network).
-      radius server port: Radius server Port number (if ent network).
-      radius server secret: Radius server secret (if ent network).
-      channel: 5G band channel.
-    """
-    dict_5g = {}
-    dict_5g["security"] = self.driver.find_element_by_name(
-        BAND_5G_AUTH).get_attribute("value")
-    dict_5g["SSID"] = self.driver.find_element_by_name(
-        BAND_5G_SSID).get_attribute("value")
-    if dict_5g["security"] == "psk2" or dict_5g["security"] == "sae":
-      dict_5g["password"] = self.driver.find_element_by_name(
-          BAND_5G_PSK).get_attribute("value")
-    elif dict_5g["security"] == "wpa2":
-      dict_5g["radius_ip_addr"] = self.driver.find_element_by_name(
-          BAND_5G_RAD_IP).get_attribute("value")
-      dict_5g["radius_port"] = self.driver.find_element_by_name(
-          BAND_5G_RAD_PORT).get_attribute("value")
-      dict_5g["radius_secret"] = self.driver.find_element_by_name(
-          BAND_5G_RAD_KEY).get_attribute("value")
-    channel_field = self._get_webdriver_elements_for_channels("5g")
-    ch_val = self.driver.find_element_by_name(channel_field).get_attribute(
-        "value")
-    channel = 0
-    for key, val in CHANNELS_5G.items():
-      if val == ch_val:
-        channel = key
-        break
-    self.ap_settings["5g"] = dict_5g.copy()
-    self.ap_settings["5g"]["channel"] = channel
-
-  def _update_ap_settings_6g_band(self):
-    """Read settings configured on 6g band.
-
-    Parameters Updated:
-      security type: wpa3-owe, wpa3-sae.
-      ssid: SSID of the wifi network.
-      password: password of the wifi network (if sae network).
-      channel: 6G band channel.
-    """
-    dict_6g = {}
-    dict_6g["security"] = self.driver.find_element_by_name(
-        BAND_6G_AUTH).get_attribute("value")
-    dict_6g["SSID"] = self.driver.find_element_by_name(
-        BAND_6G_SSID).get_attribute("value")
-    if dict_6g["security"] == "sae":
-      dict_6g["password"] = self.driver.find_element_by_name(
-          BAND_6G_PSK).get_attribute("value")
-    channel_field = self._get_webdriver_elements_for_channels("6g")
-    ch_val = self.driver.find_element_by_name(channel_field).get_attribute(
-        "value")
-    channel = 0
-    for key, val in CHANNELS_6G.items():
-      if val == ch_val:
-        channel = key
-        break
-    self.ap_settings["6g"] = dict_6g.copy()
-    self.ap_settings["6g"]["channel"] = channel
-
-  def _update_ap_settings(self):
-    """Read AP settings of 2G, 5G and 6G bands.
-
-    This method reads the wifi network currently configured on any particular
-    band. The settings are updated to self.ap_settings object.
-    """
-    self.driver.refresh()
-    self._update_ap_settings_2g_band()
-    self._update_ap_settings_5g_band()
-    self._update_ap_settings_6g_band()
-
-  def _get_webdriver_elements_for_channels(self, band):
-    """Return webdriver elements for the band to configure channel.
-
-    Args:
-      band: str, Wifi band to configure. Ex: 2g, 5g, 6g.
-
-    Returns:
-      channel field for the specific band.
-    """
-    channel_field = BAND_2G_CHANNEL
-    if band == "5g":
-      channel_field = BAND_5G_CHANNEL
-    elif band == "6g":
-      channel_field = BAND_6G_CHANNEL
-    return channel_field
-
-  def _set_channel(self, band, channel):
-    """Configure channel on a specific band.
-
-    Args:
-      band: str, Wifi band to check. Ex: 2g, 5g, 6g.
-      channel: int, Channel to set.
-
-    Raises:
-      ValueError: An error occurred due to invalid band or configuration.
-    """
-    band = band.lower()
-    if band not in VALID_BANDS:
-      raise ValueError("Band %s is not valid" % band)
-    if (band == "2g" and channel not in CHANNELS_2G) or (
-        band == "5g" and
-        channel not in CHANNELS_5G) or (band == "6g" and
-                                        channel not in CHANNELS_6G):
-      raise ValueError("Channel %s is not supported in band %s" %
-                       (channel, band))
-    channel_field = self._get_webdriver_elements_for_channels(band)
-    channels_val_dict = CHANNELS_6G
-    if band == "2g":
-      channels_val_dict = CHANNELS_2G
-    elif band == "5g":
-      channels_val_dict = CHANNELS_5G
-    channel = channels_val_dict[channel]
-
-    # Set channel
-    if self.driver.find_element_by_name(channel_field).get_attribute(
-        "value") != channel:
-      css_selector = "select[name=%s]" % channel_field
-      Select(self.driver.find_element_by_css_selector(
-          css_selector)).select_by_value(channel)
-      time.sleep(BROWSER_WAIT_SHORT_TIMEOUT)
-
-  def _configure_personal_network(self, band, auth, ssid=None, password=None):
-    """Configure wpa3 sae/wpa2 psk network on a specific band.
-
-    Args:
-      band: str, Wifi band to check. Ex: 2g, 5g, 6g.
-      auth: str, WPA2 PSK or WPA3 SAE security.
-      ssid: str, ssid of the wifi network.
-      password: str, password of the wifi network.
-
-    Raises:
-      ValueError: An error occurred due to invalid band or configuration.
-    """
-    band = band.lower()
-    if band not in VALID_BANDS:
-      raise ValueError("Band %s is not valid" % band)
-    if band == "6g" and auth == "psk2":
-      raise ValueError("AP doesn't support WPA2 PSK on 6g band.")
-    (auth_field, ssid_field,
-     psk_field) = self._get_webdriver_elements_for_personal_auth(band)
-
-    # configure personal network
-    css_selector = "select[name=%s]" % auth_field
-    Select(self.driver.find_element_by_css_selector(
-        css_selector)).select_by_value(auth)
-    time.sleep(BROWSER_WAIT_SHORT_TIMEOUT)
-    if ssid:
-      self.driver.find_element_by_name(ssid_field).clear()
-      self.driver.find_element_by_name(ssid_field).send_keys(ssid)
-    if password:
-      self.driver.find_element_by_name(psk_field).clear()
-      self.driver.find_element_by_name(psk_field).send_keys(password)
-
-  def _configure_open_owe_network(self, band, auth, ssid=None):
-    """Configure wpa3 owe/open network on a specific band.
-
-    Args:
-      band: str, Wifi band to check. Ex: 2g, 5g, 6g.
-      auth: str, WPA2 PSK or WPA3 SAE security.
-      ssid: str, ssid of the wifi network.
-
-    Raises:
-      ValueError: An error occurred due to invalid band or configuration.
-    """
-    band = band.lower()
-    if band not in VALID_BANDS:
-      raise ValueError("Band %s is not valid" % band)
-    if band == "6g" and auth == "open":
-      raise ValueError("AP doesn't support open network on 6g band.")
-    if (band == "2g" or band == "5g") and auth == "owe":
-      raise ValueError("AP doesn't support OWE on 2g and 5g bands.")
-    (auth_field, ssid_field,
-     _) = self._get_webdriver_elements_for_personal_auth(band)
-
-    # Configure wifi network
-    css_selector = "select[name=%s]" % auth_field
-    Select(self.driver.find_element_by_css_selector(
-        css_selector)).select_by_value(auth)
-    time.sleep(BROWSER_WAIT_SHORT_TIMEOUT)
-    if ssid:
-      self.driver.find_element_by_name(ssid_field).clear()
-      self.driver.find_element_by_name(ssid_field).send_keys(ssid)
-
-  def _configure_wpa2_ent_network(self, band, radius_ip, radius_port,
-                                  radius_secret, ssid=None):
-    """Configure wpa2 ent network on a specific band.
-
-    Args:
-      band: str, Wifi band to check. Ex: 2g, 5g.
-      radius_ip: str, radius server ip addr.
-      radius_port: str, radius server port number.
-      radius_secret: str, radius server secret.
-      ssid: str, ssid of the wifi network.
-
-    Raises:
-      ValueError: An error occurred due to invalid band or configuration.
-    """
-    band = band.lower()
-    if band not in VALID_BANDS:
-      raise ValueError("Band %s is not valid" % band)
-    if band == "6g":
-      raise ValueError("6GHz doesn't support enterprise network on this AP.")
-    (auth_field, ssid_field,
-     _) = self._get_webdriver_elements_for_personal_auth(band)
-    (rad_ip_field, rad_port_field,
-     rad_key_field) = self._get_webdriver_elements_for_ent_auth(band)
-
-    # Set enterprise network
-    css_selector = "select[name=%s]" % auth_field
-    Select(self.driver.find_element_by_css_selector(
-        css_selector)).select_by_value("wpa2")
-    time.sleep(BROWSER_WAIT_SHORT_TIMEOUT)
-    if ssid:
-      self.driver.find_element_by_name(ssid_field).clear()
-      self.driver.find_element_by_name(ssid_field).send_keys(ssid)
-    self.driver.find_element_by_name(rad_ip_field).clear()
-    self.driver.find_element_by_name(rad_ip_field).send_keys(radius_ip)
-    self.driver.find_element_by_name(rad_port_field).clear()
-    self.driver.find_element_by_name(rad_port_field).send_keys(radius_port)
-    self.driver.find_element_by_name(rad_key_field).clear()
-    self.driver.find_element_by_name(rad_key_field).send_keys(radius_secret)
-
-  def _get_webdriver_elements_for_personal_auth(self, band):
-    """Return webdriver elements for the band to configure personal auth.
-
-    Args:
-      band: str, Wifi band to configure. Ex: 2g, 5g, 6g.
-
-    Returns:
-      tuple of auth, ssid, psk field for the band.
-    """
-    auth_field = BAND_2G_AUTH
-    ssid_field = BAND_2G_SSID
-    psk_field = BAND_2G_PSK
-    if band == "5g":
-      auth_field = BAND_5G_AUTH
-      ssid_field = BAND_5G_SSID
-      psk_field = BAND_5G_PSK
-    elif band == "6g":
-      auth_field = BAND_6G_AUTH
-      ssid_field = BAND_6G_SSID
-      psk_field = BAND_6G_PSK
-    return (auth_field, ssid_field, psk_field)
-
-  def _get_webdriver_elements_for_ent_auth(self, band):
-    """Return webdriver elements for the band to configure ent auth.
-
-    Args:
-      band: str, Wifi band to configure. Ex: 2g, 5g, 6g.
-
-    Returns:
-      tuple of radius server IP, port, secret for the band.
-    """
-    rad_ip_field = BAND_2G_RAD_IP
-    rad_port_field = BAND_2G_RAD_PORT
-    rad_key_field = BAND_2G_RAD_KEY
-    if band == "5g":
-      rad_ip_field = BAND_5G_RAD_IP
-      rad_port_field = BAND_5G_RAD_PORT
-      rad_key_field = BAND_5G_RAD_KEY
-    return (rad_ip_field, rad_port_field, rad_key_field)
-
-  ### Methods to configure AP ###
-
-  def set_channel_and_apply(self, band, channel):
-    """Set channel for specific band.
-
-    Args:
-      band: str, Wifi band to check. Ex: 2g, 5g, 6g.
-      channel: int, Channel to set.
-    """
-    # Go back to General tab in advanced settings
-    self.driver.find_element_by_id(GENERAL_TAB).click()
-    self._wait_for_web_element(self.driver.find_element_by_id, SMART_CONNECT)
-
-    channel_field = self._get_webdriver_elements_for_channels(band)
-    self._set_channel(band, channel)
-    self.driver.find_element_by_id(APPLY_BUTTON_ID).click()
-    time.sleep(BROWSER_WAIT_LONG_TIMEOUT)
-    self._wait_for_web_element(self.driver.find_element_by_name,
-                               channel_field, "value", channel)
-    self._update_ap_settings()
-
-  def get_configured_channel(self, band):
-    """Get the channel configured on specific band.
-
-    Args:
-      band: str, Wifi band to check. Ex: eg, 5g, 6g.
-
-    Returns:
-      Channel configured on the band.
-
-    Raises:
-      ValueError: An error occurred due to invalid band.
-    """
-    band = band.lower()
-    if band not in VALID_BANDS:
-      raise ValueError("Band %s is not valid" % band)
-    return self.ap_settings[band]["channel"]
-
-  def configure_ap(self, network_dict):
-    """Configure AP with settings for different bands.
-
-    Args:
-      network_dict: dict, dictionary that holds configuration for each band.
-    """
-    # Go back to General tab in advanced settings
-    self.driver.refresh()
-    self.driver.find_element_by_id(GENERAL_TAB).click()
-    self._wait_for_web_element(self.driver.find_element_by_id, SMART_CONNECT)
-
-    # configure wireless settings
-    self.log.info("Network dictionary: %s" % network_dict)
-    for band in network_dict:
-      security = network_dict[band]["security"]
-      ssid = network_dict[band]["SSID"] if "SSID" in network_dict[
-          band] else None
-      password = network_dict[band]["password"] if "password" in network_dict[
-          band] else None
-      if security == "open" or security == "owe":
-        self._configure_open_owe_network(band, security, ssid)
-      elif security == "psk2" or security == "sae":
-        self._configure_personal_network(band, security, ssid, password)
-      elif network_dict[band]["security"] == "wpa2":
-        self._configure_wpa2_ent_network(
-            band,
-            network_dict[band]["radius_server_ip"],
-            network_dict[band]["radius_server_port"],
-            network_dict[band]["radius_server_secret"],
-            ssid)
-
-    for band in network_dict:
-      if "channel" in network_dict[band]:
-        self._set_channel(band, network_dict[band]["channel"])
-    self.driver.find_element_by_id(APPLY_BUTTON_ID).click()
-    time.sleep(BROWSER_WAIT_LONG_TIMEOUT)
-
-    # update ap settings
-    self._update_ap_settings()
-
-    # configure hidden or 11ax mode
-    for band in network_dict:
-      apply_settings = False
-      if "hidden" in network_dict[band]:
-        res = self._configure_hidden_network(band, network_dict[band]["hidden"])
-        apply_settings = apply_settings or res
-      if "11ax" in network_dict[band]:
-        res = self._configure_11ax_mode(band, network_dict[band]["11ax"])
-        apply_settings = apply_settings or res
-      if apply_settings:
-        self.driver.find_element_by_id(APPLY_BUTTON).click()
-        time.sleep(BROWSER_WAIT_VERY_LONG_TIMEOUT)
-
-  def get_wifi_network(self, band):
-    """Get wifi network configured on the AP for the specific band.
-
-    Args:
-      band: str, Wifi band to check. Ex: 2g, 5g, 6g.
-
-    Returns:
-      Wifi network as a dictionary.
-
-    Raises:
-      ValueError: An error occurred due to invalid band.
-    """
-    band = band.lower()
-    if band not in VALID_BANDS:
-      raise ValueError("Band %s is not valid" % band)
-    wifi_network = {}
-    wifi_network["SSID"] = self.ap_settings[band]["SSID"]
-    if "password" in self.ap_settings[band]:
-      wifi_network["password"] = self.ap_settings[band]["password"]
-    security = self.ap_settings[band]["security"]
-    if security == "sae" or security == "owe":
-      wifi_network["security"] = security
-    return wifi_network
-
-  def _configure_hidden_network(self, band, val):
-    """Configure hidden network for a specific band.
-
-    Args:
-      band: str, Wifi band to configure hidden network.
-      val: str, String value to configure.
-
-    Returns:
-      True if settings applied, False if not.
-
-    Raises:
-      ValueError: An error occurred due to invalid band.
-    """
-    band = band.lower()
-    if band not in VALID_BANDS:
-      raise ValueError("Band %s is not valid" % band)
-
-    # Go to Professional tab in advanced settings
-    self.driver.find_element_by_id(PROFESSIONAL_TAB).click()
-    self._wait_for_web_element(self.driver.find_element_by_id, HE_MODE_ID)
-
-    # Select the requested band from the drop down menu
-    css_selector = "select[name=%s]" % WL_UNIT
-    Select(
-        self.driver.find_element_by_css_selector(css_selector)).select_by_value(
-            WL_BAND_VALUE[band])  # (TODO: gmoturu@) find if selection worked
-    time.sleep(BROWSER_WAIT_SHORT_TIMEOUT)
-
-    # Configure hidden network
-    state = True if val == "1" else False
-    return_result = False
-    if self.driver.find_element_by_name(WL_CLOSED).is_selected() != state:
-      css_selector = "input[name='%s'][value='%s']" % (WL_CLOSED, val)
-      self.driver.find_element_by_css_selector(css_selector).click()
-      time.sleep(BROWSER_WAIT_SHORT_TIMEOUT)
-      return_result = True
-
-    return return_result
-
-  def configure_hidden_network_and_apply(self, band, state=True):
-    """Configure hidden network for a specific band.
-
-    Args:
-      band: str, Wifi band to configure hidden network.
-      state: bool, Set the wifi network as hidden if True, False if not.
-    """
-    val = "1" if state else "0"
-    if self._configure_hidden_network(band, val):
-      self.driver.find_element_by_id(APPLY_BUTTON).click()
-      time.sleep(BROWSER_WAIT_VERY_LONG_TIMEOUT)
-      if self.driver.find_element_by_name(WL_CLOSED).is_selected() != state:
-        raise ValueError("Failed to configure hidden network on band: %s" % band)
-
-      # Go back to General tab in advanced settings
-      self.driver.find_element_by_id(GENERAL_TAB).click()
-      self._wait_for_web_element(self.driver.find_element_by_id, SMART_CONNECT)
-
-  def _configure_11ax_mode(self, band, val):
-    """Configure 11ax mode on a specific band.
-
-    Args:
-      band: str, Wifi band to check. Ex: 2g, 5g, 6g.
-      val: str, String value to configure.
-
-    Returns:
-      True if settings are applied, False if not.
-
-    Raises:
-      ValueError: An error occurred due to invalid band.
-    """
-    band = band.lower()
-    if band not in VALID_BANDS:
-      raise ValueError("Band %s is not valid" % band)
-
-    # Go to Professional tab in advanced settings
-    self.driver.find_element_by_id(PROFESSIONAL_TAB).click()
-    self._wait_for_web_element(self.driver.find_element_by_id, HE_MODE_ID)
-
-    # Select the requested band from the drop down menu
-    css_selector = "select[name=%s]" % WL_UNIT
-    Select(
-        self.driver.find_element_by_css_selector(css_selector)).select_by_value(
-            WL_BAND_VALUE[band])  # (TODO: gmoturu@) find if selection worked
-    time.sleep(BROWSER_WAIT_SHORT_TIMEOUT)
-
-    # Configure 11ax
-    return_result = False
-    if self.driver.find_element_by_name(WL_11AX).get_attribute(
-        "value") != val:
-      css_selector = "select[name=%s]" % WL_11AX
-      Select(self.driver.find_element_by_css_selector(
-          css_selector)).select_by_value(val)
-      time.sleep(BROWSER_WAIT_SHORT_TIMEOUT)
-      return_result = True
-
-    return return_result
-
-  def configure_11ax_mode_and_apply(self, band, state=True):
-    """Configure 11ax mode on a specific band.
-
-    Args:
-      band: str, Wifi band to check. Ex: 2g, 5g, 6g.
-      state: bool, Enable 11ax if True, disable if False
-    """
-    val = "1" if state else "0"
-    if self._configure_11ax_mode(band, val):
-      self.driver.find_element_by_id(APPLY_BUTTON).click()
-      time.sleep(BROWSER_WAIT_VERY_LONG_TIMEOUT)
-      self._wait_for_web_element(self.driver.find_element_by_name, WL_11AX,
-                                 "value", val)
-
-      # Go back to General tab in advanced settings
-      self.driver.find_element_by_id(GENERAL_TAB).click()
-      self._wait_for_web_element(self.driver.find_element_by_id, SMART_CONNECT)
-
-  def reset_to_default_ap_settings(self):
-    """Reset AP to the default settings."""
-    if self.default_ap_settings != self.ap_settings:
-      self.configure_ap(self.default_ap_settings)
-
diff --git a/src/antlion/controllers/attenuator.py b/src/antlion/controllers/attenuator.py
index ff68f5b..440e90a 100644
--- a/src/antlion/controllers/attenuator.py
+++ b/src/antlion/controllers/attenuator.py
@@ -20,21 +20,23 @@
 from antlion.keys import Config
 from antlion.libs.proc import job
 
-MOBLY_CONTROLLER_CONFIG_NAME = 'Attenuator'
-ACTS_CONTROLLER_REFERENCE_NAME = 'attenuators'
+MOBLY_CONTROLLER_CONFIG_NAME = "Attenuator"
+ACTS_CONTROLLER_REFERENCE_NAME = "attenuators"
 _ATTENUATOR_OPEN_RETRIES = 3
 
 
 def create(configs):
     objs = []
     for c in configs:
-        attn_model = c['Model']
+        attn_model = c["Model"]
         # Default to telnet.
-        protocol = c.get('Protocol', 'telnet')
-        module_name = 'antlion.controllers.attenuator_lib.%s.%s' % (attn_model,
-                                                                 protocol)
+        protocol = c.get("Protocol", "telnet")
+        module_name = "antlion.controllers.attenuator_lib.%s.%s" % (
+            attn_model,
+            protocol,
+        )
         module = importlib.import_module(module_name)
-        inst_cnt = c['InstrumentCount']
+        inst_cnt = c["InstrumentCount"]
         attn_inst = module.AttenuatorInstrument(inst_cnt)
         attn_inst.model = attn_model
 
@@ -45,27 +47,30 @@
             try:
                 attn_inst.open(ip_address, port)
             except Exception as e:
-                logging.error('Attempt %s to open connection to attenuator '
-                              'failed: %s' % (attempt_number, e))
+                logging.error(
+                    "Attempt %s to open connection to attenuator "
+                    "failed: %s" % (attempt_number, e)
+                )
                 if attempt_number == _ATTENUATOR_OPEN_RETRIES:
-                    ping_output = job.run('ping %s -c 1 -w 1' % ip_address,
-                                          ignore_status=True)
+                    ping_output = job.run(
+                        "ping %s -c 1 -w 1" % ip_address, ignore_status=True
+                    )
                     if ping_output.exit_status == 1:
-                        logging.error('Unable to ping attenuator at %s' %
-                                      ip_address)
+                        logging.error("Unable to ping attenuator at %s" % ip_address)
                     else:
-                        logging.error('Able to ping attenuator at %s' %
-                                      ip_address)
-                        job.run('echo "q" | telnet %s %s' % (ip_address, port),
-                                ignore_status=True)
+                        logging.error("Able to ping attenuator at %s" % ip_address)
+                        job.run(
+                            'echo "q" | telnet %s %s' % (ip_address, port),
+                            ignore_status=True,
+                        )
                     raise
         for i in range(inst_cnt):
             attn = Attenuator(attn_inst, idx=i)
-            if 'Paths' in c:
+            if "Paths" in c:
                 try:
-                    setattr(attn, 'path', c['Paths'][i])
+                    setattr(attn, "path", c["Paths"][i])
                 except IndexError:
-                    logging.error('No path specified for attenuator %d.', i)
+                    logging.error("No path specified for attenuator %d.", i)
                     raise
             objs.append(attn)
     return objs
@@ -84,7 +89,7 @@
     for attenuator in attenuators:
         info = {
             "Address": attenuator.instrument.address,
-            "Attenuator_Port": attenuator.idx
+            "Attenuator_Port": attenuator.idx,
         }
         device_info.append(info)
     return device_info
@@ -95,8 +100,7 @@
         attn.instrument.close()
 
 
-def get_attenuators_for_device(device_attenuator_configs, attenuators,
-                               attenuator_key):
+def get_attenuators_for_device(device_attenuator_configs, attenuators, attenuator_key):
     """Gets the list of attenuators associated to a specified device and builds
     a list of the attenuator objects associated to the ip address in the
     device's section of the ACTS config and the Attenuator's IP address.  In the
@@ -155,9 +159,10 @@
     for device_attenuator_config in device_attenuator_configs:
         for attenuator_port in device_attenuator_config[attenuator_key]:
             for attenuator in attenuators:
-                if (attenuator.instrument.address ==
-                        device_attenuator_config['Address']
-                        and attenuator.idx is attenuator_port):
+                if (
+                    attenuator.instrument.address == device_attenuator_config["Address"]
+                    and attenuator.idx is attenuator_port
+                ):
                     attenuator_list.append(attenuator)
     return attenuator_list
 
@@ -175,7 +180,7 @@
 
 
 class InvalidDataError(AttenuatorError):
-    """"Raised when an unexpected result is seen on the transport layer.
+    """ "Raised when an unexpected result is seen on the transport layer.
 
     When this exception is seen, closing an re-opening the link to the
     attenuator instrument is probably necessary. Something has gone wrong in
@@ -202,6 +207,7 @@
     constructors. Outside of setup/teardown, devices should be accessed via
     this generic "interface".
     """
+
     model = None
     INVALID_MAX_ATTEN = 999.9
 
@@ -219,8 +225,7 @@
         """
 
         if type(self) is AttenuatorInstrument:
-            raise NotImplementedError(
-                'Base class should not be instantiated directly!')
+            raise NotImplementedError("Base class should not be instantiated directly!")
 
         self.num_atten = num_atten
         self.max_atten = AttenuatorInstrument.INVALID_MAX_ATTEN
@@ -238,7 +243,7 @@
                 bounds values to 0 or max_atten.
             retry: if True, command will be retried if possible
         """
-        raise NotImplementedError('Base class should not be called directly!')
+        raise NotImplementedError("Base class should not be called directly!")
 
     def get_atten(self, idx, retry=False):
         """Returns the current attenuation of the attenuator at index idx.
@@ -251,7 +256,7 @@
         Returns:
             The current attenuation value as a floating point value
         """
-        raise NotImplementedError('Base class should not be called directly!')
+        raise NotImplementedError("Base class should not be called directly!")
 
 
 class Attenuator(object):
@@ -281,15 +286,14 @@
             IndexError if the index is out of range.
         """
         if not isinstance(instrument, AttenuatorInstrument):
-            raise TypeError('Must provide an Attenuator Instrument Ref')
+            raise TypeError("Must provide an Attenuator Instrument Ref")
         self.model = instrument.model
         self.instrument = instrument
         self.idx = idx
         self.offset = offset
 
         if self.idx >= instrument.num_atten:
-            raise IndexError(
-                'Attenuator index out of range for attenuator instrument')
+            raise IndexError("Attenuator index out of range for attenuator instrument")
 
     def set_atten(self, value, strict=True, retry=False):
         """Sets the attenuation.
@@ -305,13 +309,11 @@
             ValueError if value + offset is greater than the maximum value.
         """
         if value + self.offset > self.instrument.max_atten and strict:
-            raise ValueError(
-                'Attenuator Value+Offset greater than Max Attenuation!')
+            raise ValueError("Attenuator Value+Offset greater than Max Attenuation!")
 
-        self.instrument.set_atten(self.idx,
-                                  value + self.offset,
-                                  strict=strict,
-                                  retry=retry)
+        self.instrument.set_atten(
+            self.idx, value + self.offset, strict=strict, retry=retry
+        )
 
     def get_atten(self, retry=False):
         """Returns the attenuation as a float, normalized by the offset."""
@@ -320,7 +322,7 @@
     def get_max_atten(self):
         """Returns the max attenuation as a float, normalized by the offset."""
         if self.instrument.max_atten == AttenuatorInstrument.INVALID_MAX_ATTEN:
-            raise ValueError('Invalid Max Attenuator Value')
+            raise ValueError("Invalid Max Attenuator Value")
 
         return self.instrument.max_atten - self.offset
 
@@ -336,7 +338,7 @@
     small loops scattered throughout user code.
     """
 
-    def __init__(self, name=''):
+    def __init__(self, name=""):
         """This constructor for AttenuatorGroup
 
         Args:
@@ -366,7 +368,7 @@
             Requires a valid AttenuatorInstrument to be passed in.
         """
         if not instrument or not isinstance(instrument, AttenuatorInstrument):
-            raise TypeError('Must provide an Attenuator Instrument Ref')
+            raise TypeError("Must provide an Attenuator Instrument Ref")
 
         if type(indices) is range or type(indices) is list:
             for i in indices:
@@ -384,7 +386,7 @@
             TypeError if the attenuator parameter is not an Attenuator.
         """
         if not isinstance(attenuator, Attenuator):
-            raise TypeError('Must provide an Attenuator')
+            raise TypeError("Must provide an Attenuator")
 
         self.attens.append(attenuator)
 
diff --git a/src/antlion/controllers/attenuator_lib/_tnhelper.py b/src/antlion/controllers/attenuator_lib/_tnhelper.py
index 643ccdf..61b4193 100644
--- a/src/antlion/controllers/attenuator_lib/_tnhelper.py
+++ b/src/antlion/controllers/attenuator_lib/_tnhelper.py
@@ -26,7 +26,7 @@
 
 
 def _ascii_string(uc_string):
-    return str(uc_string).encode('ASCII')
+    return str(uc_string).encode("ASCII")
 
 
 class _TNHelper(object):
@@ -35,10 +35,8 @@
     It should only be used by those implementation control libraries and not by
     any user code directly.
     """
-    def __init__(self,
-                 tx_cmd_separator='\n',
-                 rx_cmd_separator='\n',
-                 prompt=''):
+
+    def __init__(self, tx_cmd_separator="\n", rx_cmd_separator="\n", prompt=""):
         self._tn = None
         self._ip_address = None
         self._port = None
@@ -77,37 +75,37 @@
             True when telnet server is reachable and telnet connection has been
             successfully reopened
         """
-        logging.debug('Diagnosing telnet connection')
+        logging.debug("Diagnosing telnet connection")
         try:
-            job_result = job.run('ping {} -c 5 -i 0.2'.format(
-                self._ip_address))
+            job_result = job.run("ping {} -c 5 -i 0.2".format(self._ip_address))
         except:
             logging.error("Unable to ping telnet server.")
             return False
         ping_output = job_result.stdout
-        if not re.search(r' 0% packet loss', ping_output):
-            logging.error('Ping Packets Lost. Result: {}'.format(ping_output))
+        if not re.search(r" 0% packet loss", ping_output):
+            logging.error("Ping Packets Lost. Result: {}".format(ping_output))
             return False
         try:
             self.close()
         except:
-            logging.error('Cannot close telnet connection.')
+            logging.error("Cannot close telnet connection.")
             return False
         try:
             self.open(self._ip_address, self._port)
         except:
-            logging.error('Cannot reopen telnet connection.')
+            logging.error("Cannot reopen telnet connection.")
             return False
-        logging.debug('Telnet connection likely recovered')
+        logging.debug("Telnet connection likely recovered")
         return True
 
     def cmd(self, cmd_str, wait_ret=True, retry=False):
         if not isinstance(cmd_str, str):
-            raise TypeError('Invalid command string', cmd_str)
+            raise TypeError("Invalid command string", cmd_str)
 
         if not self.is_open():
             raise attenuator.InvalidOperationError(
-                'Telnet connection not open for commands')
+                "Telnet connection not open for commands"
+            )
 
         cmd_str.strip(self.tx_cmd_separator)
         self._tn.read_until(_ascii_string(self.prompt), 2)
@@ -117,23 +115,25 @@
             return None
 
         match_idx, match_val, ret_text = self._tn.expect(
-            [_ascii_string('\S+' + self.rx_cmd_separator)], 1)
+            [_ascii_string("\S+" + self.rx_cmd_separator)], 1
+        )
 
-        logging.debug('Telnet Command: {}'.format(cmd_str))
-        logging.debug('Telnet Reply: ({},{},{})'.format(
-            match_idx, match_val, ret_text))
+        logging.debug("Telnet Command: {}".format(cmd_str))
+        logging.debug("Telnet Reply: ({},{},{})".format(match_idx, match_val, ret_text))
 
         if match_idx == -1:
             telnet_recovered = self.diagnose_telnet()
             if telnet_recovered and retry:
-                logging.debug('Retrying telnet command once.')
+                logging.debug("Retrying telnet command once.")
                 return self.cmd(cmd_str, wait_ret, retry=False)
             else:
                 raise attenuator.InvalidDataError(
-                    'Telnet command failed to return valid data')
+                    "Telnet command failed to return valid data"
+                )
 
         ret_text = ret_text.decode()
-        ret_text = ret_text.strip(self.tx_cmd_separator +
-                                  self.rx_cmd_separator + self.prompt)
+        ret_text = ret_text.strip(
+            self.tx_cmd_separator + self.rx_cmd_separator + self.prompt
+        )
 
         return ret_text
diff --git a/src/antlion/controllers/attenuator_lib/aeroflex/telnet.py b/src/antlion/controllers/attenuator_lib/aeroflex/telnet.py
index 9d38214..4c34f4b 100644
--- a/src/antlion/controllers/attenuator_lib/aeroflex/telnet.py
+++ b/src/antlion/controllers/attenuator_lib/aeroflex/telnet.py
@@ -13,7 +13,6 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
 """
 Class for Telnet control of Aeroflex 832X and 833X Series Attenuator Modules
 
@@ -30,13 +29,12 @@
 
 
 class AttenuatorInstrument(attenuator.AttenuatorInstrument):
-
     def __init__(self, num_atten=0):
         super(AttenuatorInstrument, self).__init__(num_atten)
 
-        self._tnhelper = _tnhelper._TNHelper(tx_cmd_separator='\r\n',
-                                             rx_cmd_separator='\r\n',
-                                             prompt='>')
+        self._tnhelper = _tnhelper._TNHelper(
+            tx_cmd_separator="\r\n", rx_cmd_separator="\r\n", prompt=">"
+        )
         self.properties = None
         self.address = None
 
@@ -52,19 +50,22 @@
         self._tnhelper.open(host, port)
 
         # work around a bug in IO, but this is a good thing to do anyway
-        self._tnhelper.cmd('*CLS', False)
+        self._tnhelper.cmd("*CLS", False)
         self.address = host
 
         if self.num_atten == 0:
-            self.num_atten = int(self._tnhelper.cmd('RFCONFIG? CHAN'))
+            self.num_atten = int(self._tnhelper.cmd("RFCONFIG? CHAN"))
 
-        configstr = self._tnhelper.cmd('RFCONFIG? ATTN 1')
+        configstr = self._tnhelper.cmd("RFCONFIG? ATTN 1")
 
-        self.properties = dict(zip(['model', 'max_atten', 'min_step',
-                                    'unknown', 'unknown2', 'cfg_str'],
-                                   configstr.split(", ", 5)))
+        self.properties = dict(
+            zip(
+                ["model", "max_atten", "min_step", "unknown", "unknown2", "cfg_str"],
+                configstr.split(", ", 5),
+            )
+        )
 
-        self.max_atten = float(self.properties['max_atten'])
+        self.max_atten = float(self.properties["max_atten"])
 
     def is_open(self):
         """Returns True if the AttenuatorInstrument has an open connection."""
@@ -95,17 +96,15 @@
                 attenuation value.
         """
         if not self.is_open():
-            raise attenuator.InvalidOperationError('Connection not open!')
+            raise attenuator.InvalidOperationError("Connection not open!")
 
         if idx >= self.num_atten:
-            raise IndexError('Attenuator index out of range!', self.num_atten,
-                             idx)
+            raise IndexError("Attenuator index out of range!", self.num_atten, idx)
 
         if value > self.max_atten:
-            raise ValueError('Attenuator value out of range!', self.max_atten,
-                             value)
+            raise ValueError("Attenuator value out of range!", self.max_atten, value)
 
-        self._tnhelper.cmd('ATTN ' + str(idx + 1) + ' ' + str(value), False)
+        self._tnhelper.cmd("ATTN " + str(idx + 1) + " " + str(value), False)
 
     def get_atten(self, idx, **_):
         """Returns the current attenuation of the attenuator at the given index.
@@ -120,12 +119,12 @@
             the current attenuation value as a float
         """
         if not self.is_open():
-            raise attenuator.InvalidOperationError('Connection not open!')
+            raise attenuator.InvalidOperationError("Connection not open!")
 
         #       Potentially redundant safety check removed for the moment
         #       if idx >= self.num_atten:
         #           raise IndexError("Attenuator index out of range!", self.num_atten, idx)
 
-        atten_val = self._tnhelper.cmd('ATTN? ' + str(idx + 1))
+        atten_val = self._tnhelper.cmd("ATTN? " + str(idx + 1))
 
         return float(atten_val)
diff --git a/src/antlion/controllers/attenuator_lib/minicircuits/http.py b/src/antlion/controllers/attenuator_lib/minicircuits/http.py
index c84f64b..61c1e29 100644
--- a/src/antlion/controllers/attenuator_lib/minicircuits/http.py
+++ b/src/antlion/controllers/attenuator_lib/minicircuits/http.py
@@ -57,18 +57,22 @@
         self._timeout = timeout
         self.address = host
 
-        att_req = urllib.request.urlopen('http://{}:{}/MN?'.format(
-            self._ip_address, self._port))
-        config_str = att_req.read().decode('utf-8').strip()
-        if not config_str.startswith('MN='):
+        att_req = urllib.request.urlopen(
+            "http://{}:{}/MN?".format(self._ip_address, self._port)
+        )
+        config_str = att_req.read().decode("utf-8").strip()
+        if not config_str.startswith("MN="):
             raise attenuator.InvalidDataError(
-                'Attenuator returned invalid data. Attenuator returned: {}'.
-                format(config_str))
+                "Attenuator returned invalid data. Attenuator returned: {}".format(
+                    config_str
+                )
+            )
 
-        config_str = config_str[len('MN='):]
+        config_str = config_str[len("MN=") :]
         self.properties = dict(
-            zip(['model', 'max_freq', 'max_atten'], config_str.split('-', 2)))
-        self.max_atten = float(self.properties['max_atten'])
+            zip(["model", "max_freq", "max_atten"], config_str.split("-", 2))
+        )
+        self.max_atten = float(self.properties["max_atten"])
 
     def is_open(self):
         """Returns True if the AttenuatorInstrument has an open connection.
@@ -104,27 +108,28 @@
             expected output.
         """
         if not (0 <= idx < self.num_atten):
-            raise IndexError('Attenuator index out of range!', self.num_atten,
-                             idx)
+            raise IndexError("Attenuator index out of range!", self.num_atten, idx)
 
         if value > self.max_atten and strict:
-            raise ValueError('Attenuator value out of range!', self.max_atten,
-                             value)
+            raise ValueError("Attenuator value out of range!", self.max_atten, value)
         # The actual device uses one-based index for channel numbers.
         adjusted_value = min(max(0, value), self.max_atten)
         att_req = urllib.request.urlopen(
-            'http://{}:{}/CHAN:{}:SETATT:{}'.format(self._ip_address,
-                                                    self._port, idx + 1,
-                                                    adjusted_value),
-            timeout=self._timeout)
-        att_resp = att_req.read().decode('utf-8').strip()
-        if att_resp != '1':
+            "http://{}:{}/CHAN:{}:SETATT:{}".format(
+                self._ip_address, self._port, idx + 1, adjusted_value
+            ),
+            timeout=self._timeout,
+        )
+        att_resp = att_req.read().decode("utf-8").strip()
+        if att_resp != "1":
             if retry:
                 self.set_atten(idx, value, strict, retry=False)
             else:
                 raise attenuator.InvalidDataError(
-                    'Attenuator returned invalid data. Attenuator returned: {}'
-                    .format(att_resp))
+                    "Attenuator returned invalid data. Attenuator returned: {}".format(
+                        att_resp
+                    )
+                )
 
     def get_atten(self, idx, retry=False, **_):
         """Returns the current attenuation of the attenuator at the given index.
@@ -141,12 +146,12 @@
             the current attenuation value as a float
         """
         if not (0 <= idx < self.num_atten):
-            raise IndexError('Attenuator index out of range!', self.num_atten,
-                             idx)
+            raise IndexError("Attenuator index out of range!", self.num_atten, idx)
         att_req = urllib.request.urlopen(
-            'http://{}:{}/CHAN:{}:ATT?'.format(self._ip_address, self.port, idx + 1),
-            timeout=self._timeout)
-        att_resp = att_req.read().decode('utf-8').strip()
+            "http://{}:{}/CHAN:{}:ATT?".format(self._ip_address, self.port, idx + 1),
+            timeout=self._timeout,
+        )
+        att_resp = att_req.read().decode("utf-8").strip()
         try:
             atten_val = float(att_resp)
         except:
@@ -154,6 +159,8 @@
                 self.get_atten(idx, retry=False)
             else:
                 raise attenuator.InvalidDataError(
-                    'Attenuator returned invalid data. Attenuator returned: {}'
-                    .format(att_resp))
+                    "Attenuator returned invalid data. Attenuator returned: {}".format(
+                        att_resp
+                    )
+                )
         return atten_val
diff --git a/src/antlion/controllers/attenuator_lib/minicircuits/telnet.py b/src/antlion/controllers/attenuator_lib/minicircuits/telnet.py
index 538532f..ad9f0ce 100644
--- a/src/antlion/controllers/attenuator_lib/minicircuits/telnet.py
+++ b/src/antlion/controllers/attenuator_lib/minicircuits/telnet.py
@@ -40,9 +40,9 @@
 
     def __init__(self, num_atten=0):
         super(AttenuatorInstrument, self).__init__(num_atten)
-        self._tnhelper = _tnhelper._TNHelper(tx_cmd_separator='\r\n',
-                                             rx_cmd_separator='\r\n',
-                                             prompt='')
+        self._tnhelper = _tnhelper._TNHelper(
+            tx_cmd_separator="\r\n", rx_cmd_separator="\r\n", prompt=""
+        )
         self.address = None
 
     def __del__(self):
@@ -64,14 +64,15 @@
         if self.num_atten == 0:
             self.num_atten = 1
 
-        config_str = self._tnhelper.cmd('MN?')
+        config_str = self._tnhelper.cmd("MN?")
 
-        if config_str.startswith('MN='):
-            config_str = config_str[len('MN='):]
+        if config_str.startswith("MN="):
+            config_str = config_str[len("MN=") :]
 
         self.properties = dict(
-            zip(['model', 'max_freq', 'max_atten'], config_str.split('-', 2)))
-        self.max_atten = float(self.properties['max_atten'])
+            zip(["model", "max_freq", "max_atten"], config_str.split("-", 2))
+        )
+        self.max_atten = float(self.properties["max_atten"])
 
     def is_open(self):
         """Returns True if the AttenuatorInstrument has an open connection."""
@@ -107,19 +108,16 @@
         """
 
         if not self.is_open():
-            raise attenuator.InvalidOperationError('Connection not open!')
+            raise attenuator.InvalidOperationError("Connection not open!")
 
         if idx >= self.num_atten:
-            raise IndexError('Attenuator index out of range!', self.num_atten,
-                             idx)
+            raise IndexError("Attenuator index out of range!", self.num_atten, idx)
 
         if value > self.max_atten and strict:
-            raise ValueError('Attenuator value out of range!', self.max_atten,
-                             value)
+            raise ValueError("Attenuator value out of range!", self.max_atten, value)
         # The actual device uses one-based index for channel numbers.
         adjusted_value = min(max(0, value), self.max_atten)
-        self._tnhelper.cmd('CHAN:%s:SETATT:%s' % (idx + 1, adjusted_value),
-                           retry=retry)
+        self._tnhelper.cmd("CHAN:%s:SETATT:%s" % (idx + 1, adjusted_value), retry=retry)
 
     def get_atten(self, idx, retry=False):
         """Returns the current attenuation of the attenuator at the given index.
@@ -135,16 +133,14 @@
             the current attenuation value as a float
         """
         if not self.is_open():
-            raise attenuator.InvalidOperationError('Connection not open!')
+            raise attenuator.InvalidOperationError("Connection not open!")
 
         if idx >= self.num_atten or idx < 0:
-            raise IndexError('Attenuator index out of range!', self.num_atten,
-                             idx)
+            raise IndexError("Attenuator index out of range!", self.num_atten, idx)
 
         if self.num_atten == 1:
-            atten_val_str = self._tnhelper.cmd(':ATT?', retry=retry)
+            atten_val_str = self._tnhelper.cmd(":ATT?", retry=retry)
         else:
-            atten_val_str = self._tnhelper.cmd('CHAN:%s:ATT?' % (idx + 1),
-                                               retry=retry)
+            atten_val_str = self._tnhelper.cmd("CHAN:%s:ATT?" % (idx + 1), retry=retry)
         atten_val = float(atten_val_str)
         return atten_val
diff --git a/src/antlion/controllers/bits.py b/src/antlion/controllers/bits.py
deleted file mode 100644
index 0a9ed21..0000000
--- a/src/antlion/controllers/bits.py
+++ /dev/null
@@ -1,470 +0,0 @@
-"""Module managing the required definitions for using the bits power monitor"""
-
-import logging
-import os
-import time
-import uuid
-
-from antlion import context
-from antlion.controllers import power_metrics
-from antlion.controllers import power_monitor
-from antlion.controllers.bits_lib import bits_client
-from antlion.controllers.bits_lib import bits_service
-from antlion.controllers.bits_lib import bits_service_config as bsc
-
-MOBLY_CONTROLLER_CONFIG_NAME = 'Bits'
-ACTS_CONTROLLER_REFERENCE_NAME = 'bitses'
-
-
-def create(configs):
-    return [Bits(index, config) for (index, config) in enumerate(configs)]
-
-
-def destroy(bitses):
-    for bits in bitses:
-        bits.teardown()
-
-
-def get_info(bitses):
-    return [bits.config for bits in bitses]
-
-
-class BitsError(Exception):
-    pass
-
-
-class _BitsCollection(object):
-    """Object that represents a bits collection
-
-    Attributes:
-        name: The name given to the collection.
-        markers_buffer: An array of un-flushed markers, each marker is
-        represented by a bi-dimensional tuple with the format
-        (<nanoseconds_since_epoch or datetime>, <text>).
-        monsoon_output_path: A path to store monsoon-like data if possible, Bits
-        uses this path to attempt data extraction in monsoon format, if this
-        parameter is left as None such extraction is not attempted.
-    """
-
-    def __init__(self, name, monsoon_output_path=None):
-        self.monsoon_output_path = monsoon_output_path
-        self.name = name
-        self.markers_buffer = []
-
-    def add_marker(self, timestamp, marker_text):
-        self.markers_buffer.append((timestamp, marker_text))
-
-
-def _transform_name(bits_metric_name):
-    """Transform bits metrics names to a more succinct version.
-
-    Examples of bits_metrics_name as provided by the client:
-    - default_device.slider.C1_30__PP0750_L1S_VDD_G3D_M_P:mA,
-    - default_device.slider.C1_30__PP0750_L1S_VDD_G3D_M_P:mW,
-    - default_device.Monsoon.Monsoon:mA,
-    - default_device.Monsoon.Monsoon:mW,
-    - <device>.<collector>.<rail>:<unit>
-
-    Args:
-        bits_metric_name: A bits metric name.
-
-    Returns:
-        For monsoon metrics, and for backwards compatibility:
-          Monsoon:mA -> avg_current,
-          Monsoon:mW -> avg_power,
-
-        For everything else:
-          <rail>:mW -> <rail/rail>_avg_current
-          <rail>:mW -> <rail/rail>_avg_power
-          ...
-    """
-    prefix, unit = bits_metric_name.split(':')
-    rail = prefix.split('.')[-1]
-
-    if 'mW' == unit:
-        suffix = 'avg_power'
-    elif 'mA' == unit:
-        suffix = 'avg_current'
-    elif 'mV' == unit:
-        suffix = 'avg_voltage'
-    else:
-        logging.warning('unknown unit type for unit %s' % unit)
-        suffix = ''
-
-    if 'Monsoon' == rail:
-        return suffix
-    elif suffix == '':
-        return rail
-    else:
-        return '%s_%s' % (rail, suffix)
-
-
-def _raw_data_to_metrics(raw_data_obj):
-    data = raw_data_obj['data']
-    metrics = []
-    for sample in data:
-        unit = sample['unit']
-        if 'Msg' == unit:
-            continue
-        elif 'mW' == unit:
-            unit_type = 'power'
-        elif 'mA' == unit:
-            unit_type = 'current'
-        elif 'mV' == unit:
-            unit_type = 'voltage'
-        else:
-            logging.warning('unknown unit type for unit %s' % unit)
-            continue
-
-        name = _transform_name(sample['name'])
-        avg = sample['avg']
-        metrics.append(power_metrics.Metric(avg, unit_type, unit, name=name))
-
-    return metrics
-
-
-def _get_single_file(registry, key):
-    if key not in registry:
-        return None
-    entry = registry[key]
-    if isinstance(entry, str):
-        return entry
-    if isinstance(entry, list):
-        return None if len(entry) == 0 else entry[0]
-    raise ValueError('registry["%s"] is of unsupported type %s for this '
-                     'operation. Supported types are str and list.' % (
-                         key, type(entry)))
-
-
-class Bits(object):
-
-    ROOT_RAIL_KEY = 'RootRail'
-    ROOT_RAIL_DEFAULT_VALUE = 'Monsoon:mA'
-
-    def __init__(self, index, config):
-        """Creates an instance of a bits controller.
-
-        Args:
-            index: An integer identifier for this instance, this allows to
-                tell apart different instances in the case where multiple
-                bits controllers are being used concurrently.
-            config: The config as defined in the ACTS  BiTS controller config.
-                Expected format is:
-                {
-                    // optional
-                    'Monsoon':   {
-                        'serial_num': <serial number:int>,
-                        'monsoon_voltage': <voltage:double>
-                    }
-                    // optional
-                    'Kibble': [
-                        {
-                            'board': 'BoardName1',
-                            'connector': 'A',
-                            'serial': 'serial_1'
-                        },
-                        {
-                            'board': 'BoardName2',
-                            'connector': 'D',
-                            'serial': 'serial_2'
-                        }
-                    ]
-                    // optional
-                    'RootRail': 'Monsoon:mA'
-                }
-        """
-        self.index = index
-        self.config = config
-        self._service = None
-        self._client = None
-        self._active_collection = None
-        self._collections_counter = 0
-        self._root_rail = config.get(self.ROOT_RAIL_KEY,
-                                     self.ROOT_RAIL_DEFAULT_VALUE)
-
-    def setup(self, *_, registry=None, **__):
-        """Starts a bits_service in the background.
-
-        This function needs to be called with either a registry or after calling
-        power_monitor.update_registry, and it needs to be called before any other
-        method in this class.
-
-        Args:
-            registry: A dictionary with files used by bits. Format:
-                {
-                    // required, string or list of strings
-                    bits_service: ['/path/to/bits_service']
-
-                    // required, string or list of strings
-                    bits_client: ['/path/to/bits.par']
-
-                    // needed for monsoon, string or list of strings
-                    lvpm_monsoon: ['/path/to/lvpm_monsoon.par']
-
-                    // needed for monsoon, string or list of strings
-                    hvpm_monsoon: ['/path/to/hvpm_monsoon.par']
-
-                    // needed for kibble, string or list of strings
-                    kibble_bin: ['/path/to/kibble.par']
-
-                    // needed for kibble, string or list of strings
-                    kibble_board_file: ['/path/to/phone_s.board']
-
-                    // optional, string or list of strings
-                    vm_file: ['/path/to/file.vm']
-                }
-
-                All fields in this dictionary can be either a string or a list
-                of strings. If lists are passed, only their first element is
-                taken into account. The reason for supporting lists but only
-                acting on their first element is for easier integration with
-                harnesses that handle resources as lists.
-        """
-        if registry is None:
-            registry = power_monitor.get_registry()
-        if 'bits_service' not in registry:
-            raise ValueError('No bits_service binary has been defined in the '
-                             'global registry.')
-        if 'bits_client' not in registry:
-            raise ValueError('No bits_client binary has been defined in the '
-                             'global registry.')
-
-        bits_service_binary = _get_single_file(registry, 'bits_service')
-        bits_client_binary = _get_single_file(registry, 'bits_client')
-        lvpm_monsoon_bin = _get_single_file(registry, 'lvpm_monsoon')
-        hvpm_monsoon_bin = _get_single_file(registry, 'hvpm_monsoon')
-        kibble_bin = _get_single_file(registry, 'kibble_bin')
-        kibble_board_file = _get_single_file(registry, 'kibble_board_file')
-        vm_file = _get_single_file(registry, 'vm_file')
-        config = bsc.BitsServiceConfig(self.config,
-                                       lvpm_monsoon_bin=lvpm_monsoon_bin,
-                                       hvpm_monsoon_bin=hvpm_monsoon_bin,
-                                       kibble_bin=kibble_bin,
-                                       kibble_board_file=kibble_board_file,
-                                       virtual_metrics_file=vm_file)
-        output_log = os.path.join(
-            context.get_current_context().get_full_output_path(),
-            'bits_service_out_%s.txt' % self.index)
-        service_name = 'bits_config_%s' % self.index
-
-        self._active_collection = None
-        self._collections_counter = 0
-        self._service = bits_service.BitsService(config,
-                                                 bits_service_binary,
-                                                 output_log,
-                                                 name=service_name,
-                                                 timeout=3600 * 24)
-        self._service.start()
-        self._client = bits_client.BitsClient(bits_client_binary,
-                                              self._service,
-                                              config)
-        # this call makes sure that the client can interact with the server.
-        devices = self._client.list_devices()
-        logging.debug(devices)
-
-    def disconnect_usb(self, *_, **__):
-        self._client.disconnect_usb()
-
-    def connect_usb(self, *_, **__):
-        self._client.connect_usb()
-
-    def measure(self, *_, measurement_args=None,
-                measurement_name=None, monsoon_output_path=None,
-                **__):
-        """Blocking function that measures power through bits for the specified
-        duration. Results need to be consulted through other methods such as
-        get_metrics or post processing files like the ones
-        generated at monsoon_output_path after calling `release_resources`.
-
-        Args:
-            measurement_args: A dictionary with the following structure:
-                {
-                   'duration': <seconds to measure for>
-                   'hz': <samples per second>
-                   'measure_after_seconds': <sleep time before measurement>
-                }
-                The actual number of samples per second is limited by the
-                bits configuration. The value of hz is defaulted to 1000.
-            measurement_name: A name to give to the measurement (which is also
-                used as the Bits collection name. Bits collection names (and
-                therefore measurement names) need to be unique within the
-                context of a Bits object.
-            monsoon_output_path: If provided this path will be used to generate
-                a monsoon like formatted file at the release_resources step.
-        """
-        if measurement_args is None:
-            raise ValueError('measurement_args can not be left undefined')
-
-        duration = measurement_args.get('duration')
-        if duration is None:
-            raise ValueError(
-                'duration can not be left undefined within measurement_args')
-
-        hz = measurement_args.get('hz', 1000)
-
-        # Delay the start of the measurement if an offset is required
-        measure_after_seconds = measurement_args.get('measure_after_seconds')
-        if measure_after_seconds:
-            time.sleep(measure_after_seconds)
-
-        if self._active_collection:
-            raise BitsError(
-                'Attempted to start a collection while there is still an '
-                'active one. Active collection: %s',
-                self._active_collection.name)
-
-        self._collections_counter = self._collections_counter + 1
-        # The name gets a random 8 characters salt suffix because the Bits
-        # client has a bug where files with the same name are considered to be
-        # the same collection and it won't load two files with the same name.
-        # b/153170987 b/153944171
-        if not measurement_name:
-            measurement_name = 'bits_collection_%s_%s' % (
-                str(self._collections_counter), str(uuid.uuid4())[0:8])
-
-        self._active_collection = _BitsCollection(measurement_name,
-                                                  monsoon_output_path)
-        self._client.start_collection(self._active_collection.name,
-                                      default_sampling_rate=hz)
-        time.sleep(duration)
-
-    def get_metrics(self, *_, timestamps=None, **__):
-        """Gets metrics for the segments delimited by the timestamps dictionary.
-
-        Must be called before releasing resources, otherwise it will fail adding
-        markers to the collection.
-
-        Args:
-            timestamps: A dictionary of the shape:
-                {
-                    'segment_name': {
-                        'start' : <milliseconds_since_epoch> or <datetime>
-                        'end': <milliseconds_since_epoch> or <datetime>
-                    }
-                    'another_segment': {
-                        'start' : <milliseconds_since_epoch> or <datetime>
-                        'end': <milliseconds_since_epoch> or <datetime>
-                    }
-                }
-        Returns:
-            A dictionary of the shape:
-                {
-                    'segment_name': <list of power_metrics.Metric>
-                    'another_segment': <list of power_metrics.Metric>
-                }
-        """
-        if timestamps is None:
-            raise ValueError('timestamps dictionary can not be left undefined')
-
-        metrics = {}
-
-        for segment_name, times in timestamps.items():
-            if 'start' not in times or 'end' not in times:
-                continue
-
-            start = times['start']
-            end = times['end']
-
-            # bits accepts nanoseconds only, but since this interface needs to
-            # backwards compatible with monsoon which works with milliseconds we
-            # require to do a conversion from milliseconds to nanoseconds.
-            # The preferred way for new calls to this function should be using
-            # datetime instead which is unambiguous
-            if isinstance(start, (int, float)):
-                start = start * 1e6
-            if isinstance(end, (int, float)):
-                end = end * 1e6
-
-            raw_metrics = self._client.get_metrics(self._active_collection.name,
-                                                   start=start, end=end)
-            self._add_marker(start, 'start - %s' % segment_name)
-            self._add_marker(end, 'end - %s' % segment_name)
-            metrics[segment_name] = _raw_data_to_metrics(raw_metrics)
-        return metrics
-
-    def _add_marker(self, timestamp, marker_text):
-        if not self._active_collection:
-            raise BitsError(
-                'markers can not be added without an active collection')
-        self._active_collection.add_marker(timestamp, marker_text)
-
-    def release_resources(self):
-        """Performs all the cleanup and export tasks.
-
-        In the way that Bits' is interfaced several tasks can not be performed
-        while a collection is still active (like exporting the data) and others
-        can only take place while the collection is still active (like adding
-        markers to a collection).
-
-        To workaround this unique workflow, the collections that are started
-        with the 'measure' method are not really stopped after the method
-        is unblocked, it is only stopped after this method is called.
-
-        All the export files (.7z.bits and monsoon-formatted file) are also
-        generated in this method.
-        """
-        if not self._active_collection:
-            raise BitsError(
-                'Attempted to stop a collection without starting one')
-        self._client.add_markers(self._active_collection.name,
-                                 self._active_collection.markers_buffer)
-        self._client.stop_collection(self._active_collection.name)
-
-        export_file = os.path.join(
-            context.get_current_context().get_full_output_path(),
-            '%s.7z.bits' % self._active_collection.name)
-        self._client.export(self._active_collection.name, export_file)
-        if self._active_collection.monsoon_output_path:
-            self._attempt_monsoon_format()
-        self._active_collection = None
-
-    def _attempt_monsoon_format(self):
-        """Attempts to create a monsoon-formatted file.
-
-        In the case where there is not enough information to retrieve a
-        monsoon-like file, this function will do nothing.
-        """
-        available_channels = self._client.list_channels(
-            self._active_collection.name)
-        milli_amps_channel = None
-
-        for channel in available_channels:
-            if channel.endswith(self._root_rail):
-                milli_amps_channel = self._root_rail
-                break
-
-        if milli_amps_channel is None:
-            logging.debug('No monsoon equivalent channels were found when '
-                          'attempting to recreate monsoon file format. '
-                          'Available channels were: %s',
-                          str(available_channels))
-            return
-
-        logging.debug('Recreating monsoon file format from channel: %s',
-                      milli_amps_channel)
-        self._client.export_as_monsoon_format(
-            self._active_collection.monsoon_output_path,
-            self._active_collection.name,
-            milli_amps_channel)
-
-    def get_waveform(self, file_path=None):
-        """Parses a file generated in release_resources.
-
-        Args:
-            file_path: Path to a waveform file.
-
-        Returns:
-            A list of tuples in which the first element is a timestamp and the
-            second element is the sampled current at that time.
-        """
-        if file_path is None:
-            raise ValueError('file_path can not be None')
-
-        return list(power_metrics.import_raw_data(file_path))
-
-    def teardown(self):
-        if self._service is None:
-            return
-
-        if self._service.service_state == bits_service.BitsServiceStates.STARTED:
-            self._service.stop()
diff --git a/src/antlion/controllers/bits_lib/__init__.py b/src/antlion/controllers/bits_lib/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/controllers/bits_lib/__init__.py
+++ /dev/null
diff --git a/src/antlion/controllers/bits_lib/bits_client.py b/src/antlion/controllers/bits_lib/bits_client.py
deleted file mode 100644
index c68aafd..0000000
--- a/src/antlion/controllers/bits_lib/bits_client.py
+++ /dev/null
@@ -1,335 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import csv
-from datetime import datetime
-import logging
-import tempfile
-
-from antlion.libs.proc import job
-import yaml
-
-
-class BitsClientError(Exception):
-    pass
-
-
-# An arbitrary large number of seconds.
-ONE_YEAR = str(3600 * 24 * 365)
-EPOCH = datetime.utcfromtimestamp(0)
-
-
-def _to_ns(timestamp):
-    """Returns the numerical value of a timestamp in nanoseconds since epoch.
-
-    Args:
-        timestamp: Either a number or a datetime.
-
-    Returns:
-        Rounded timestamp if timestamp is numeric, number of nanoseconds since
-        epoch if timestamp is instance of datetime.datetime.
-    """
-    if isinstance(timestamp, datetime):
-        return int((timestamp - EPOCH).total_seconds() * 1e9)
-    elif isinstance(timestamp, (float, int)):
-        return int(timestamp)
-    raise ValueError('%s can not be converted to a numerical representation of '
-                     'nanoseconds.' % type(timestamp))
-
-
-class BitsClient(object):
-    """Helper class to issue bits' commands"""
-
-    def __init__(self, binary, service, service_config):
-        """Constructs a BitsClient.
-
-        Args:
-            binary: The location of the bits.par client binary.
-            service: A bits_service.BitsService object. The service is expected
-              to be previously setup.
-            service_config: The bits_service_config.BitsService object used to
-              start the service on service_port.
-        """
-        self._log = logging.getLogger()
-        self._binary = binary
-        self._service = service
-        self._server_config = service_config
-
-    def _acquire_monsoon(self):
-        """Gets hold of a Monsoon so no other processes can use it.
-        Only works if there is a monsoon."""
-        self._log.debug('acquiring monsoon')
-        self.run_cmd('--collector',
-                     'Monsoon',
-                     '--collector_cmd',
-                     'acquire_monsoon', timeout=10)
-
-    def _release_monsoon(self):
-        self._log.debug('releasing monsoon')
-        self.run_cmd('--collector',
-                     'Monsoon',
-                     '--collector_cmd',
-                     'release_monsoon', timeout=10)
-
-    def run_cmd(self, *args, timeout=60):
-        """Executes a generic bits.par command.
-
-        Args:
-            args: A bits.par command as a tokenized array. The path to the
-              binary and the service port are provided by default, cmd should
-              only contain the remaining tokens of the desired command.
-            timeout: Number of seconds to wait for the command to finish before
-              forcibly killing it.
-        """
-        result = job.run([self._binary, '--port',
-                          self._service.port] + [str(arg) for arg in args],
-                         timeout=timeout)
-        return result.stdout
-
-    def export(self, collection_name, path):
-        """Exports a collection to its bits persistent format.
-
-        Exported files can be shared and opened through the Bits UI.
-
-        Args:
-            collection_name: Collection to be exported.
-            path: Where the resulting file should be created. Bits requires that
-            the resulting file ends in .7z.bits.
-        """
-        if not path.endswith('.7z.bits'):
-            raise BitsClientError('Bits\' collections can only be exported to '
-                                  'files ending in .7z.bits, got %s' % path)
-        self._log.debug('exporting collection %s to %s',
-                        collection_name,
-                        path)
-        self.run_cmd('--name',
-                     collection_name,
-                     '--ignore_gaps',
-                     '--export',
-                     '--export_path',
-                     path,
-                     timeout=600)
-
-    def export_as_csv(self, channels, collection_name, output_file):
-        """Export bits data as CSV.
-
-        Writes the selected channel data to the given output_file. Note that
-        the first line of the file contains headers.
-
-        Args:
-          channels: A list of string pattern matches for the channel to be
-            retrieved. For example, ":mW" will export all power channels,
-            ":mV" will export all voltage channels, "C1_01__" will export
-            power/voltage/current for the first fail of connector 1.
-          collection_name: A string for a collection that is sampling.
-          output_file: A string file path where the CSV will be written.
-        """
-        channels_arg = ','.join(channels)
-        cmd = ['--csvfile',
-               output_file,
-               '--name',
-               collection_name,
-               '--ignore_gaps',
-               '--csv_rawtimestamps',
-               '--channels',
-               channels_arg]
-        if self._server_config.has_virtual_metrics_file:
-            cmd = cmd + ['--vm_file', 'default']
-        self._log.debug(
-            'exporting csv for collection %s to %s, with channels %s',
-            collection_name, output_file, channels_arg)
-        self.run_cmd(*cmd, timeout=600)
-
-    def add_markers(self, collection_name, markers):
-        """Appends markers to a collection.
-
-        These markers are displayed in the Bits UI and are useful to label
-        important test events.
-
-        Markers can only be added to collections that have not been
-        closed / stopped. Markers need to be added in chronological order,
-        this function ensures that at least the markers added in each
-        call are sorted in chronological order, but if this function
-        is called multiple times, then is up to the user to ensure that
-        the subsequent batches of markers are for timestamps higher (newer)
-        than all the markers passed in previous calls to this function.
-
-        Args:
-            collection_name: The name of the collection to add markers to.
-            markers: A list of tuples of the shape:
-
-             [(<nano_seconds_since_epoch or datetime>, <marker text>),
-              (<nano_seconds_since_epoch or datetime>, <marker text>),
-              (<nano_seconds_since_epoch or datetime>, <marker text>),
-              ...
-            ]
-        """
-        # sorts markers in chronological order before adding them. This is
-        # required by go/pixel-bits
-        for ts, marker in sorted(markers, key=lambda x: _to_ns(x[0])):
-            self._log.debug('Adding marker at %s: %s', str(ts), marker)
-            self.run_cmd('--name',
-                         collection_name,
-                         '--log_ts',
-                         str(_to_ns(ts)),
-                         '--log',
-                         marker,
-                         timeout=10)
-
-    def get_metrics(self, collection_name, start=None, end=None):
-        """Extracts metrics for a period of time.
-
-        Args:
-            collection_name: The name of the collection to get metrics from
-            start: Numerical nanoseconds since epoch until the start of the
-            period of interest or datetime. If not provided, start will be the
-            beginning of the collection.
-            end: Numerical nanoseconds since epoch until the end of the
-            period of interest or datetime. If not provided, end will be the
-            end of the collection.
-        """
-        with tempfile.NamedTemporaryFile(prefix='bits_metrics') as tf:
-            cmd = ['--name',
-                   collection_name,
-                   '--ignore_gaps',
-                   '--aggregates_yaml_path',
-                   tf.name]
-
-            if start is not None:
-                cmd = cmd + ['--abs_start_time', str(_to_ns(start))]
-            if end is not None:
-                cmd = cmd + ['--abs_stop_time', str(_to_ns(end))]
-            if self._server_config.has_virtual_metrics_file:
-                cmd = cmd + ['--vm_file', 'default']
-
-            self.run_cmd(*cmd)
-            with open(tf.name) as mf:
-                self._log.debug(
-                    'bits aggregates for collection %s [%s-%s]: %s' % (
-                        collection_name, start, end,
-                        mf.read()))
-
-            with open(tf.name) as mf:
-                return yaml.safe_load(mf)
-
-    def disconnect_usb(self):
-        """Disconnects the monsoon's usb. Only works if there is a monsoon"""
-        self._log.debug('disconnecting monsoon\'s usb')
-        self.run_cmd('--collector',
-                     'Monsoon',
-                     '--collector_cmd',
-                     'usb_disconnect', timeout=10)
-
-    def start_collection(self, collection_name, default_sampling_rate=1000):
-        """Indicates Bits to start a collection.
-
-        Args:
-            collection_name: Name to give to the collection to be started.
-            Collection names must be unique at Bits' service level. If multiple
-            collections must be taken within the context of the same Bits'
-            service, ensure that each collection is given a different one.
-            default_sampling_rate: Samples per second to be collected
-        """
-
-        cmd = ['--name',
-               collection_name,
-               '--non_blocking',
-               '--time',
-               ONE_YEAR,
-               '--default_sampling_rate',
-               str(default_sampling_rate)]
-
-        if self._server_config.has_kibbles:
-            cmd = cmd + ['--disk_space_saver']
-
-        self._log.debug('starting collection %s', collection_name)
-        self.run_cmd(*cmd, timeout=10)
-
-    def connect_usb(self):
-        """Connects the monsoon's usb. Only works if there is a monsoon."""
-        cmd = ['--collector',
-               'Monsoon',
-               '--collector_cmd',
-               'usb_connect']
-        self._log.debug('connecting monsoon\'s usb')
-        self.run_cmd(*cmd, timeout=10)
-
-    def stop_collection(self, collection_name):
-        """Stops the active collection."""
-        self._log.debug('stopping collection %s', collection_name)
-        self.run_cmd('--name',
-                     collection_name,
-                     '--stop')
-        self._log.debug('stopped collection %s', collection_name)
-
-    def list_devices(self):
-        """Lists devices managed by the bits_server this client is connected
-        to.
-
-        Returns:
-            bits' output when called with --list devices.
-        """
-        self._log.debug('listing devices')
-        result = self.run_cmd('--list', 'devices', timeout=20)
-        return result
-
-    def list_channels(self, collection_name):
-        """Finds all the available channels in a given collection.
-
-        Args:
-            collection_name: The name of the collection to get channels from.
-        """
-        metrics = self.get_metrics(collection_name)
-        return [channel['name'] for channel in metrics['data']]
-
-    def export_as_monsoon_format(self, dest_path, collection_name,
-                                 channel_pattern):
-        """Exports data from a collection in monsoon style.
-
-        This function exists because there are tools that have been built on
-        top of the monsoon format. To be able to leverage such tools we need
-        to make the data compliant with the format.
-
-        The monsoon format is:
-
-        <time_since_epoch_in_secs> <amps>
-
-        Args:
-            dest_path: Path where the resulting file will be generated.
-            collection_name: The name of the Bits' collection to export data
-            from.
-            channel_pattern: A regex that matches the Bits' channel to be used
-            as source of data. If there are multiple matching channels, only the
-            first one will be used. The channel is always assumed to be
-            expressed en milli-amps, the resulting format requires amps, so the
-            values coming from the first matching channel will always be
-            multiplied by 1000.
-        """
-        with tempfile.NamedTemporaryFile(prefix='bits_csv_') as tmon:
-            self.export_as_csv([channel_pattern], collection_name, tmon.name)
-
-            self._log.debug(
-                'massaging bits csv to monsoon format for collection'
-                ' %s', collection_name)
-            with open(tmon.name) as csv_file:
-                reader = csv.reader(csv_file)
-                headers = next(reader)
-                self._log.debug('csv headers %s', headers)
-                with open(dest_path, 'w') as dest:
-                    for row in reader:
-                        ts = float(row[0]) / 1e9
-                        amps = float(row[1]) / 1e3
-                        dest.write('%.7f %.12f\n' % (ts, amps))
diff --git a/src/antlion/controllers/bits_lib/bits_service.py b/src/antlion/controllers/bits_lib/bits_service.py
deleted file mode 100644
index ad2f660..0000000
--- a/src/antlion/controllers/bits_lib/bits_service.py
+++ /dev/null
@@ -1,223 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import atexit
-import json
-import logging
-import os
-import re
-import signal
-import tempfile
-import time
-
-from enum import Enum
-
-from antlion import context
-from antlion.libs.proc import job
-from antlion.libs.proc import process
-
-
-class BitsServiceError(Exception):
-    pass
-
-
-class BitsServiceStates(Enum):
-    NOT_STARTED = 'not-started'
-    STARTED = 'started'
-    STOPPED = 'stopped'
-
-
-class BitsService(object):
-    """Helper class to start and stop a bits service
-
-    Attributes:
-        port: When the service starts the port it was assigned to is made
-        available for external agents to reference to the background service.
-        config: The BitsServiceConfig used to configure this service.
-        name: A free form string.
-        service_state: A BitsServiceState that represents the service state.
-    """
-
-    def __init__(self, config, binary, output_log_path,
-                 name='bits_service_default',
-                 timeout=None):
-        """Creates a BitsService object.
-
-        Args:
-            config: A BitsServiceConfig.
-            described in go/pixel-bits/user-guide/service/configuration.md
-            binary: Path to a bits_service binary.
-            output_log_path: Full path to where the resulting logs should be
-            stored.
-            name: Optional string to identify this service by. This
-            is used as reference in logs to tell this service apart from others
-            running in parallel.
-            timeout: Maximum time in seconds the service should be allowed
-            to run in the background after start. If left undefined the service
-            in the background will not time out.
-        """
-        self.name = name
-        self.port = None
-        self.config = config
-        self.service_state = BitsServiceStates.NOT_STARTED
-        self._timeout = timeout
-        self._binary = binary
-        self._log = logging.getLogger()
-        self._process = None
-        self._output_log = open(output_log_path, 'w')
-        self._collections_dir = tempfile.TemporaryDirectory(
-            prefix='bits_service_collections_dir_')
-        self._cleaned_up = False
-        atexit.register(self._atexit_cleanup)
-
-    def _atexit_cleanup(self):
-        if not self._cleaned_up:
-            self._log.error('Cleaning up bits_service %s at exit.', self.name)
-            self._cleanup()
-
-    def _write_extra_debug_logs(self):
-        dmesg_log = '%s.dmesg.txt' % self._output_log.name
-        dmesg = job.run(['dmesg', '-e'], ignore_status=True)
-        with open(dmesg_log, 'w') as f:
-            f.write(dmesg.stdout)
-
-        free_log = '%s.free.txt' % self._output_log.name
-        free = job.run(['free', '-m'], ignore_status=True)
-        with open(free_log, 'w') as f:
-            f.write(free.stdout)
-
-        df_log = '%s.df.txt' % self._output_log.name
-        df = job.run(['df', '-h'], ignore_status=True)
-        with open(df_log, 'w') as f:
-            f.write(df.stdout)
-
-    def _cleanup(self):
-        self._write_extra_debug_logs()
-        self.port = None
-        self._collections_dir.cleanup()
-        if self._process and self._process.is_running():
-            self._process.signal(signal.SIGINT)
-            self._log.debug('SIGINT sent to bits_service %s.' % self.name)
-            self._process.wait(kill_timeout=60.0)
-            self._log.debug('bits_service %s has been stopped.' % self.name)
-        self._output_log.close()
-        if self.config.has_monsoon:
-            job.run([self.config.monsoon_config.monsoon_binary,
-                     '--serialno',
-                     str(self.config.monsoon_config.serial_num),
-                     '--usbpassthrough',
-                     'on'],
-                    timeout=10)
-        self._cleaned_up = True
-
-    def _service_started_listener(self, line):
-        if self.service_state is BitsServiceStates.STARTED:
-            return
-        if 'Started server!' in line and self.port is not None:
-            self.service_state = BitsServiceStates.STARTED
-
-    PORT_PATTERN = re.compile(r'.*Server listening on .*:(\d+)\.$')
-
-    def _service_port_listener(self, line):
-        if self.port is not None:
-            return
-        match = self.PORT_PATTERN.match(line)
-        if match:
-            self.port = match.group(1)
-
-    def _output_callback(self, line):
-        self._output_log.write(line)
-        self._output_log.write('\n')
-        self._service_port_listener(line)
-        self._service_started_listener(line)
-
-    def _trigger_background_process(self, binary):
-        config_path = os.path.join(
-            context.get_current_context().get_full_output_path(),
-            '%s.config.json' % self.name)
-        with open(config_path, 'w') as f:
-            f.write(json.dumps(self.config.config_dic, indent=2))
-
-        cmd = [binary,
-               '--port',
-               '0',
-               '--collections_folder',
-               self._collections_dir.name,
-               '--collector_config_file',
-               config_path]
-
-        # bits_service only works on linux systems, therefore is safe to assume
-        # that 'timeout' will be available.
-        if self._timeout:
-            cmd = ['timeout',
-                   '--signal=SIGTERM',
-                   '--kill-after=60',
-                   str(self._timeout)] + cmd
-
-        self._process = process.Process(cmd)
-        self._process.set_on_output_callback(self._output_callback)
-        self._process.set_on_terminate_callback(self._on_terminate)
-        self._process.start()
-
-    def _on_terminate(self, *_):
-        self._log.error('bits_service %s stopped unexpectedly.', self.name)
-        self._cleanup()
-
-    def start(self):
-        """Starts the bits service in the background.
-
-        This function blocks until the background service signals that it has
-        successfully started. A BitsServiceError is raised if the signal is not
-        received.
-        """
-        if self.service_state is BitsServiceStates.STOPPED:
-            raise BitsServiceError(
-                'bits_service %s was already stopped. A stopped'
-                ' service can not be started again.' % self.name)
-
-        if self.service_state is BitsServiceStates.STARTED:
-            raise BitsServiceError(
-                'bits_service %s has already been started.' % self.name)
-
-        self._log.info('starting bits_service %s', self.name)
-        self._trigger_background_process(self._binary)
-
-        # wait 40 seconds for the service to be ready.
-        max_startup_wait = time.time() + 40
-        while time.time() < max_startup_wait:
-            if self.service_state is BitsServiceStates.STARTED:
-                self._log.info('bits_service %s started on port %s', self.name,
-                               self.port)
-                return
-            time.sleep(0.1)
-
-        self._log.error('bits_service %s did not start on time, starting '
-                        'service teardown and raising a BitsServiceError.')
-        self._cleanup()
-        raise BitsServiceError(
-            'bits_service %s did not start successfully' % self.name)
-
-    def stop(self):
-        """Stops the bits service."""
-        if self.service_state is BitsServiceStates.STOPPED:
-            raise BitsServiceError(
-                'bits_service %s has already been stopped.' % self.name)
-        port = self.port
-        self._log.info('stopping bits_service %s on port %s', self.name, port)
-        self.service_state = BitsServiceStates.STOPPED
-        self._cleanup()
-        self._log.info('bits_service %s on port %s was stopped', self.name,
-                       port)
diff --git a/src/antlion/controllers/bits_lib/bits_service_config.py b/src/antlion/controllers/bits_lib/bits_service_config.py
deleted file mode 100644
index 1900869..0000000
--- a/src/antlion/controllers/bits_lib/bits_service_config.py
+++ /dev/null
@@ -1,249 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import copy
-
-DEFAULT_MONSOON_CONFIG_DICT = {
-    'enabled': 1,
-    'type': 'monsooncollector',
-    'monsoon_reset': 0,
-    # maximum monsoon sample rate that works best for both lvpm and hvpm
-    'sampling_rate': 1000,
-}
-
-
-class _BitsMonsoonConfig(object):
-    """Helper object to construct a bits_service config from a monsoon config as
-    defined for the bits controller config and required additional resources,
-    such as paths to executables.
-
-    The format for the bits_service's monsoon configuration is explained at:
-    http://go/pixel-bits/user-guide/service/collectors/monsoon
-
-    Attributes:
-        config_dic: A bits_service's monsoon configuration as a python
-        dictionary.
-    """
-
-    def __init__(self, monsoon_config, lvpm_monsoon_bin=None,
-                 hvpm_monsoon_bin=None):
-        """Constructs _BitsServiceMonsoonConfig.
-
-        Args:
-            monsoon_config: The monsoon config as defined in the
-                ACTS Bits controller config. Expected format is:
-                  { 'serial_num': <serial number:int>,
-                    'monsoon_voltage': <voltage:double> }
-            lvpm_monsoon_bin: Binary file to interact with low voltage monsoons.
-                Needed if the monsoon is a lvpm monsoon (serial number lower
-                than 20000).
-            hvpm_monsoon_bin: Binary file to interact with high voltage
-                monsoons. Needed if the monsoon is a hvpm monsoon (serial number
-                greater than 20000).
-        """
-        if 'serial_num' not in monsoon_config:
-            raise ValueError(
-                'Monsoon serial_num can not be undefined. Received '
-                'config was: %s' % monsoon_config)
-        if 'monsoon_voltage' not in monsoon_config:
-            raise ValueError('Monsoon voltage can not be undefined. Received '
-                             'config was: %s' % monsoon_config)
-
-        self.serial_num = int(monsoon_config['serial_num'])
-        self.monsoon_voltage = float(monsoon_config['monsoon_voltage'])
-
-        self.config_dic = copy.deepcopy(DEFAULT_MONSOON_CONFIG_DICT)
-        if float(self.serial_num) >= 20000:
-            self.config_dic['hv_monsoon'] = 1
-            if hvpm_monsoon_bin is None:
-                raise ValueError('hvpm_monsoon binary is needed but was None. '
-                                 'Received config was: %s' % monsoon_config)
-            self.monsoon_binary = hvpm_monsoon_bin
-        else:
-            self.config_dic['hv_monsoon'] = 0
-            if lvpm_monsoon_bin is None:
-                raise ValueError('lvpm_monsoon binary is needed but was None. '
-                                 'Received config was: %s' % monsoon_config)
-            self.monsoon_binary = lvpm_monsoon_bin
-
-        self.config_dic['monsoon_binary_path'] = self.monsoon_binary
-        self.config_dic['monsoon_voltage'] = self.monsoon_voltage
-        self.config_dic['serial_num'] = self.serial_num
-
-
-DEFAULT_KIBBLES_BOARD_CONFIG = {
-    'enabled': 1,
-    'type': 'kibblecollector',
-    'attached_kibbles': {}
-}
-
-DEFAULT_KIBBLE_CONFIG = {
-    'ultra_channels_current_hz': 976.5625,
-    'ultra_channels_voltage_hz': 976.5625,
-    'high_channels_current_hz': 976.5625,
-    'high_channels_voltage_hz': 976.5625
-}
-
-
-class _BitsKibblesConfig(object):
-    def __init__(self, kibbles_config, kibble_bin, kibble_board_file):
-        """Constructs _BitsKibblesConfig.
-
-        Args:
-            kibbles_config: A list of compacted kibble boards descriptions.
-                Expected format is:
-                    [{
-                        'board': 'BoardName1',
-                        'connector': 'A',
-                        'serial': 'serial_1'
-                     },
-                    {
-                        'board': 'BoardName2',
-                        'connector': 'D',
-                        'serial': 'serial_2'
-                    }]
-                More details can be found at go/acts-bits.
-            kibble_bin: Binary file to interact with kibbles.
-            kibble_board_file: File describing the distribution of rails on a
-                kibble. go/kibble#setting-up-bits-board-files
-        """
-
-        if not isinstance(kibbles_config, list):
-            raise ValueError(
-                'kibbles_config must be a list. Got %s.' % kibbles_config)
-
-        if kibble_bin is None:
-            raise ValueError('Kibbles were present in the config but no '
-                             'kibble_bin was provided')
-        if kibble_board_file is None:
-            raise ValueError('Kibbles were present in the config but no '
-                             'kibble_board_file was provided')
-
-        self.boards_configs = {}
-
-        for kibble in kibbles_config:
-            if 'board' not in kibble:
-                raise ValueError('An individual kibble config must have a '
-                                 'board')
-            if 'connector' not in kibble:
-                raise ValueError('An individual kibble config must have a '
-                                 'connector')
-            if 'serial' not in kibble:
-                raise ValueError('An individual kibble config must have a '
-                                 'serial')
-
-            board = kibble['board']
-            connector = kibble['connector']
-            serial = kibble['serial']
-            if board not in self.boards_configs:
-                self.boards_configs[board] = copy.deepcopy(
-                    DEFAULT_KIBBLES_BOARD_CONFIG)
-                self.boards_configs[board][
-                    'board_file'] = kibble_board_file
-                self.boards_configs[board]['kibble_py'] = kibble_bin
-            kibble_config = copy.deepcopy(DEFAULT_KIBBLE_CONFIG)
-            kibble_config['connector'] = connector
-            self.boards_configs[board]['attached_kibbles'][
-                serial] = kibble_config
-
-
-DEFAULT_SERVICE_CONFIG_DICT = {
-    'devices': {
-        'default_device': {
-            'enabled': 1,
-            'collectors': {}
-        }
-    }
-}
-
-
-class BitsServiceConfig(object):
-    """Helper object to construct a bits_service config from a bits controller
-    config and required additional resources, such as paths to executables.
-
-    The format for bits_service's configuration is explained in:
-    go/pixel-bits/user-guide/service/configuration.md
-
-    Attributes:
-        config_dic: A bits_service configuration as a python dictionary.
-    """
-
-    def __init__(self, controller_config, lvpm_monsoon_bin=None,
-                 hvpm_monsoon_bin=None, kibble_bin=None,
-                 kibble_board_file=None, virtual_metrics_file=None):
-        """Creates a BitsServiceConfig.
-
-        Args:
-            controller_config: The config as defined in the ACTS  BiTS
-                controller config. Expected format is:
-                {
-                    // optional
-                    'Monsoon':   {
-                        'serial_num': <serial number:int>,
-                        'monsoon_voltage': <voltage:double>
-                    }
-                    // optional
-                    'Kibble': [
-                        {
-                            'board': 'BoardName1',
-                            'connector': 'A',
-                            'serial': 'serial_1'
-                        },
-                        {
-                            'board': 'BoardName2',
-                            'connector': 'D',
-                            'serial': 'serial_2'
-                        }
-                    ]
-                }
-            lvpm_monsoon_bin: Binary file to interact with low voltage monsoons.
-                Needed if the monsoon is a lvpm monsoon (serial number lower
-                than 20000).
-            hvpm_monsoon_bin: Binary file to interact with high voltage
-                monsoons. Needed if the monsoon is a hvpm monsoon (serial number
-                greater than 20000).
-            kibble_bin: Binary file to interact with kibbles.
-            kibble_board_file: File describing the distribution of rails on a
-                kibble. go/kibble#setting-up-bits-board-files
-            virtual_metrics_file: A list of virtual metrics files to add
-                data aggregates on top of regular channel aggregates.
-                go/pixel-bits/user-guide/virtual-metrics
-        """
-        self.config_dic = copy.deepcopy(DEFAULT_SERVICE_CONFIG_DICT)
-        self.has_monsoon = False
-        self.has_kibbles = False
-        self.has_virtual_metrics_file = False
-        self.monsoon_config = None
-        self.kibbles_config = None
-        if 'Monsoon' in controller_config:
-            self.has_monsoon = True
-            self.monsoon_config = _BitsMonsoonConfig(
-                controller_config['Monsoon'],
-                lvpm_monsoon_bin,
-                hvpm_monsoon_bin)
-            self.config_dic['devices']['default_device']['collectors'][
-                'Monsoon'] = self.monsoon_config.config_dic
-        if 'Kibbles' in controller_config:
-            self.has_kibbles = True
-            self.kibbles_config = _BitsKibblesConfig(
-                controller_config['Kibbles'],
-                kibble_bin, kibble_board_file)
-            self.config_dic['devices']['default_device']['collectors'].update(
-                self.kibbles_config.boards_configs)
-            if virtual_metrics_file is not None:
-                self.config_dic['devices']['default_device'][
-                    'vm_files'] = [virtual_metrics_file]
-                self.has_virtual_metrics_file = True
diff --git a/src/antlion/controllers/bluetooth_pts_device.py b/src/antlion/controllers/bluetooth_pts_device.py
deleted file mode 100644
index 1dbe5c7..0000000
--- a/src/antlion/controllers/bluetooth_pts_device.py
+++ /dev/null
@@ -1,764 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Prerequisites:
-    Windows 10
-    Bluetooth PTS installed
-    Recommended: Running cmder as Admin: https://cmder.net/
-
-### BEGIN SETUP STEPS###
-1. Install latest version of Python for windows:
-    https://www.python.org/downloads/windows/
-
-Tested successfully on Python 3.7.3.:
-    https://www.python.org/ftp/python/3.7.3/python-3.7.3.exe
-
-2. Launch Powershell and setup PATH:
-Setx PATH “%PATH%;C:/Users/<username>/AppData/Local/Programs/Python/Python37-32/Scripts”
-
-3. Launch Cmder as Admin before running any PTS related ACTS tests.
-
-
-### END SETUP STEPS###
-
-
-Bluetooth PTS controller.
-Mandatory parameters are log_directory and sig_root_directory.
-
-ACTS Config setup:
-"BluetoothPtsDevice": {
-    "log_directory": "C:\\Users\\fsbtt\\Documents\\Profile Tuning Suite\\Test_Dir",
-    "sig_root_directory": "C:\\Program Files (x86)\\Bluetooth SIG"
-}
-
-"""
-from antlion import signals
-from datetime import datetime
-
-import ctypes
-import logging
-import os
-import subprocess
-import time
-import xml.etree.ElementTree as ET
-
-from xml.dom import minidom
-from xml.etree.ElementTree import Element
-
-
-class BluetoothPtsDeviceConfigError(signals.ControllerError):
-    pass
-
-
-class BluetoothPtsSnifferError(signals.ControllerError):
-    pass
-
-
-MOBLY_CONTROLLER_CONFIG_NAME = "BluetoothPtsDevice"
-ACTS_CONTROLLER_REFERENCE_NAME = "bluetooth_pts_device"
-
-# Prefix to identify final verdict string. This is a PTS specific log String.
-VERDICT = 'VERDICT/'
-
-# Verdict strings that are specific to PTS.
-VERDICT_STRINGS = {
-    'RESULT_PASS': 'PASS',
-    'RESULT_FAIL': 'FAIL',
-    'RESULT_INCONC': 'INCONC',
-    'RESULT_INCOMP':
-    'INCOMP',  # Initial final verdict meaning that test has not completed yet.
-    'RESULT_NONE':
-    'NONE',  # Error verdict usually indicating internal PTS error.
-}
-
-# Sniffer ready log message.
-SNIFFER_READY = 'SNIFFER/Save and clear complete'
-
-# PTS Log Types as defined by PTS:
-LOG_TYPE_GENERAL_TEXT = 0
-LOG_TYPE_FIRST = 1
-LOG_TYPE_START_TEST_CASE = 1
-LOG_TYPE_TEST_CASE_ENDED = 2
-LOG_TYPE_START_DEFAULT = 3
-LOG_TYPE_DEFAULT_ENDED = 4
-LOG_TYPE_FINAL_VERDICT = 5
-LOG_TYPE_PRELIMINARY_VERDICT = 6
-LOG_TYPE_TIMEOUT = 7
-LOG_TYPE_ASSIGNMENT = 8
-LOG_TYPE_START_TIMER = 9
-LOG_TYPE_STOP_TIMER = 10
-LOG_TYPE_CANCEL_TIMER = 11
-LOG_TYPE_READ_TIMER = 12
-LOG_TYPE_ATTACH = 13
-LOG_TYPE_IMPLICIT_SEND = 14
-LOG_TYPE_GOTO = 15
-LOG_TYPE_TIMED_OUT_TIMER = 16
-LOG_TYPE_ERROR = 17
-LOG_TYPE_CREATE = 18
-LOG_TYPE_DONE = 19
-LOG_TYPE_ACTIVATE = 20
-LOG_TYPE_MESSAGE = 21
-LOG_TYPE_LINE_MATCHED = 22
-LOG_TYPE_LINE_NOT_MATCHED = 23
-LOG_TYPE_SEND_EVENT = 24
-LOG_TYPE_RECEIVE_EVENT = 25
-LOG_TYPE_OTHERWISE_EVENT = 26
-LOG_TYPE_RECEIVED_ON_PCO = 27
-LOG_TYPE_MATCH_FAILED = 28
-LOG_TYPE_COORDINATION_MESSAGE = 29
-
-PTS_DEVICE_EMPTY_CONFIG_MSG = "Configuration is empty, abort!"
-
-
-def create(config):
-    if not config:
-        raise errors.PTS_DEVICE_EMPTY_CONFIG_MSG
-    return get_instance(config)
-
-
-def destroy(pts):
-    try:
-        pts[0].clean_up()
-    except:
-        pts[0].log.error("Failed to clean up properly.")
-
-
-def get_info(pts_devices):
-    """Get information from the BluetoothPtsDevice object.
-
-    Args:
-        pts_devices: A list of BluetoothPtsDevice objects although only one
-        will ever be specified.
-
-    Returns:
-        A dict, representing info for BluetoothPtsDevice object.
-    """
-    return {
-        "address": pts_devices[0].address,
-        "sniffer_ready": pts_devices[0].sniffer_ready,
-        "ets_manager_library": pts_devices[0].ets_manager_library,
-        "log_directory": pts_devices[0].log_directory,
-        "pts_installation_directory":
-        pts_devices[0].pts_installation_directory,
-    }
-
-
-def get_instance(config):
-    """Create BluetoothPtsDevice instance from a dictionary containing
-    information related to PTS. Namely the SIG root directory as
-    sig_root_directory and the log directory represented by the log_directory.
-
-    Args:
-        config: A dict that contains BluetoothPtsDevice device info.
-
-    Returns:
-        A list of BluetoothPtsDevice objects.
-    """
-    result = []
-    try:
-        log_directory = config.pop("log_directory")
-    except KeyError:
-        raise BluetoothPtsDeviceConfigError(
-            "Missing mandatory log_directory in config.")
-    try:
-        sig_root_directory = config.pop("sig_root_directory")
-    except KeyError:
-        example_path = \
-            "C:\\\\Program Files (x86)\\\\Bluetooth SIG"
-        raise BluetoothPtsDeviceConfigError(
-            "Missing mandatory sig_root_directory in config. Example path: {}".
-            format(example_path))
-
-    # "C:\\Program Files (x86)\\Bluetooth SIG\\Bluetooth PTS\\bin\\ETSManager.dll"
-    ets_manager_library = "{}\\Bluetooth PTS\\bin\\ETSManager.dll".format(
-        sig_root_directory)
-    # "C:\\Program Files (x86)\\Bluetooth SIG\\Bluetooth PTS\\bin"
-    pts_installation_directory = "{}\\Bluetooth PTS\\bin".format(
-        sig_root_directory)
-    # "C:\\Program Files (x86)\\Bluetooth SIG\\Bluetooth Protocol Viewer"
-    pts_sniffer_directory = "{}\\Bluetooth Protocol Viewer".format(
-        sig_root_directory)
-    result.append(
-        BluetoothPtsDevice(ets_manager_library, log_directory,
-                           pts_installation_directory, pts_sniffer_directory))
-    return result
-
-
-class BluetoothPtsDevice:
-    """Class representing an Bluetooth PTS device and associated functions.
-
-    Each object of this class represents one BluetoothPtsDevice in ACTS.
-    """
-
-    _next_action = -1
-    _observers = []
-    address = ""
-    current_implicit_send_description = ""
-    devices = []
-    extra_answers = []
-    log_directory = ""
-    log = None
-    ics = None
-    ixit = None
-    profile_under_test = None
-    pts_library = None
-    pts_profile_mmi_request = ""
-    pts_test_result = VERDICT_STRINGS['RESULT_INCOMP']
-    sniffer_ready = False
-    test_log_directory = ""
-    test_log_prefix = ""
-
-    def __init__(self, ets_manager_library, log_directory,
-                 pts_installation_directory, pts_sniffer_directory):
-        self.log = logging.getLogger()
-        if ets_manager_library is not None:
-            self.ets_manager_library = ets_manager_library
-        self.log_directory = log_directory
-        if pts_installation_directory is not None:
-            self.pts_installation_directory = pts_installation_directory
-        if pts_sniffer_directory is not None:
-            self.pts_sniffer_directory = pts_sniffer_directory
-        # Define callback functions
-        self.USEAUTOIMPLSENDFUNC = ctypes.CFUNCTYPE(ctypes.c_bool)
-        self.use_auto_impl_send_func = self.USEAUTOIMPLSENDFUNC(
-            self.UseAutoImplicitSend)
-
-        self.DONGLE_MSG_FUNC = ctypes.CFUNCTYPE(ctypes.c_bool, ctypes.c_char_p)
-        self.dongle_msg_func = self.DONGLE_MSG_FUNC(self.DongleMsg)
-
-        self.DEVICE_SEARCH_MSG_FUNC = ctypes.CFUNCTYPE(ctypes.c_bool,
-                                                       ctypes.c_char_p,
-                                                       ctypes.c_char_p,
-                                                       ctypes.c_char_p)
-        self.dev_search_msg_func = self.DEVICE_SEARCH_MSG_FUNC(
-            self.DeviceSearchMsg)
-
-        self.LOGFUNC = ctypes.CFUNCTYPE(ctypes.c_bool, ctypes.c_char_p,
-                                        ctypes.c_char_p, ctypes.c_char_p,
-                                        ctypes.c_int, ctypes.c_void_p)
-        self.log_func = self.LOGFUNC(self.Log)
-
-        self.ONIMPLSENDFUNC = ctypes.CFUNCTYPE(ctypes.c_char_p,
-                                               ctypes.c_char_p, ctypes.c_int)
-        self.onimplsend_func = self.ONIMPLSENDFUNC(self.ImplicitSend)
-
-        # Helps with PTS reliability.
-        os.chdir(self.pts_installation_directory)
-        # Load EtsManager
-        self.pts_library = ctypes.cdll.LoadLibrary(self.ets_manager_library)
-        self.log.info("ETS Manager library {0:s} has been loaded".format(
-            self.ets_manager_library))
-        # If post-logging is turned on all callbacks to LPLOG-type function
-        # will be executed after test execution is complete. It is recommended
-        # that post-logging is turned on to avoid simultaneous invocations of
-        # LPLOG and LPAUTOIMPLICITSEND callbacks.
-        self.pts_library.SetPostLoggingEx(True)
-
-        self.xml_root = Element("ARCHIVE")
-        version = Element("VERSION")
-        version.text = "2.0"
-        self.xml_root.append(version)
-        self.xml_pts_pixit = Element("PicsPixit")
-        self.xml_pts_pixit.text = ""
-        self.xml_pts_running_log = Element("LOG")
-        self.xml_pts_running_log.text = ""
-        self.xml_pts_running_summary = Element("SUMMARY")
-        self.xml_pts_running_summary.text = ""
-
-    def clean_up(self):
-        # Since we have no insight to the actual PTS library,
-        # catch all Exceptions and log them.
-        try:
-            self.log.info("Cleaning up Stack...")
-            self.pts_library.ExitStackEx(self.profile_under_test)
-        except Exception as err:
-            self.log.error(
-                "Failed to clean up BluetoothPtsDevice: {}".format(err))
-        try:
-            self.log.info("Unregistering Profile...")
-            self.pts_library.UnregisterProfileEx.argtypes = [ctypes.c_char_p]
-            self.pts_library.UnregisterProfileEx(
-                self.profile_under_test.encode())
-            self.pts_library.UnRegisterGetDevInfoEx()
-        except Exception as err:
-            self.log.error(
-                "Failed to clean up BluetoothPtsDevice: {}".format(err))
-        try:
-            self.log.info("Cleaning up Sniffer")
-            self.pts_library.SnifferTerminateEx()
-        except Exception as err:
-            self.log.error(
-                "Failed to clean up BluetoothPtsDevice: {}".format(err))
-        self.log.info("Cleanup Done.")
-
-    def write_xml_pts_pixit_values_for_current_test(self):
-        """ Writes the current PICS and IXIT values to the XML result.
-        """
-        self.xml_pts_pixit.text = "ICS VALUES:\n\n"
-        for key, value in self.ics.items():
-            self.xml_pts_pixit.text += "{} {}\n".format(
-                key.decode(), value.decode())
-        self.xml_pts_pixit.text += "\nIXIT VALUES:\n\n"
-        for key, (_, value) in self.ixit.items():
-            self.xml_pts_pixit.text += "{} {}\n".format(
-                key.decode(), value.decode())
-
-    def set_ics_and_ixit(self, ics, ixit):
-        self.ics = ics
-        self.ixit = ixit
-
-    def set_profile_under_test(self, profile):
-        self.profile_under_test = profile
-
-    def setup_pts(self):
-        """Prepares PTS to run tests. This needs to be called in test classes
-        after ICS, IXIT, and setting Profile under test.
-        Specifically BluetoothPtsDevice functions:
-            set_profile_under_test
-            set_ics_and_ixit
-        """
-
-        # Register layer to test with callbacks
-        self.pts_library.RegisterProfileWithCallbacks.argtypes = [
-            ctypes.c_char_p, self.USEAUTOIMPLSENDFUNC, self.ONIMPLSENDFUNC,
-            self.LOGFUNC, self.DEVICE_SEARCH_MSG_FUNC, self.DONGLE_MSG_FUNC
-        ]
-        res = self.pts_library.RegisterProfileWithCallbacks(
-            self.profile_under_test.encode(), self.use_auto_impl_send_func,
-            self.onimplsend_func, self.log_func, self.dev_search_msg_func,
-            self.dongle_msg_func)
-
-        self.log.info(
-            "Profile has been registered with result {0:d}".format(res))
-
-        # GetDeviceInfo module is for discovering devices and PTS Dongle address
-        # Initialize GetDeviceInfo and register it with callbacks
-        # First parameter is PTS executable directory
-        self.pts_library.InitGetDevInfoWithCallbacks.argtypes = [
-            ctypes.c_char_p, self.DEVICE_SEARCH_MSG_FUNC, self.DONGLE_MSG_FUNC
-        ]
-        res = self.pts_library.InitGetDevInfoWithCallbacks(
-            self.pts_installation_directory.encode(), self.dev_search_msg_func,
-            self.dongle_msg_func)
-        self.log.info(
-            "GetDevInfo has been initialized with result {0:d}".format(res))
-        # Initialize PTS dongle
-        res = self.pts_library.VerifyDongleEx()
-        self.log.info(
-            "PTS dongle has been initialized with result {0:d}".format(res))
-
-        # Find PTS dongle address
-        self.pts_library.GetDongleBDAddress.restype = ctypes.c_ulonglong
-        self.address = self.pts_library.GetDongleBDAddress()
-        self.address_str = "{0:012X}".format(self.address)
-        self.log.info("PTS BD Address 0x{0:s}".format(self.address_str))
-
-        # Initialize Bluetooth Protocol Viewer communication module
-        self.pts_library.SnifferInitializeEx()
-
-        # If Bluetooth Protocol Viewer is not running, start it
-        if not self.is_sniffer_running():
-            self.log.info("Starting Protocol Viewer")
-            args = [
-                "{}\Executables\Core\FTS.exe".format(
-                    self.pts_sniffer_directory),
-                '/PTS Protocol Viewer=Generic',
-                '/OEMTitle=Bluetooth Protocol Viewer', '/OEMKey=Virtual'
-            ]
-            subprocess.Popen(args)
-            sniffer_timeout = 10
-            while not self.is_sniffer_running():
-                time.sleep(sniffer_timeout)
-
-        # Register to recieve Bluetooth Protocol Viewer notofications
-        self.pts_library.SnifferRegisterNotificationEx()
-        self.pts_library.SetParameterEx.argtypes = [
-            ctypes.c_char_p, ctypes.c_char_p, ctypes.c_char_p, ctypes.c_char_p
-        ]
-
-        for ics_name in self.ics:
-            res = self.pts_library.SetParameterEx(
-                ics_name, b'BOOLEAN', self.ics[ics_name],
-                self.profile_under_test.encode())
-            if res:
-                self.log.info("ICS {0:s} set successfully".format(
-                    str(ics_name)))
-            else:
-                self.log.error("Setting ICS {0:s} value failed".format(
-                    str(ics_name)))
-
-        for ixit_name in self.ixit:
-            res = self.pts_library.SetParameterEx(
-                ixit_name, (self.ixit[ixit_name])[0],
-                (self.ixit[ixit_name])[1], self.profile_under_test.encode())
-            if res:
-                self.log.info("IXIT {0:s} set successfully".format(
-                    str(ixit_name)))
-            else:
-                self.log.error("Setting IXIT {0:s} value failed".format(
-                    str(ixit_name)))
-
-        # Prepare directory to store Bluetooth Protocol Viewer output
-        if not os.path.exists(self.log_directory):
-            os.makedirs(self.log_directory)
-
-        address_b = self.address_str.encode("utf-8")
-        self.pts_library.InitEtsEx.argtypes = [
-            ctypes.c_char_p, ctypes.c_char_p, ctypes.c_char_p, ctypes.c_char_p
-        ]
-
-        implicit_send_path = "{}\\implicit_send3.dll".format(
-            self.pts_installation_directory).encode()
-        res = self.pts_library.InitEtsEx(self.profile_under_test.encode(),
-                                         self.log_directory.encode(),
-                                         implicit_send_path, address_b)
-        self.log.info("ETS has been initialized with result {0:s}".format(
-            str(res)))
-
-        # Initialize Host Stack DLL
-        self.pts_library.InitStackEx.argtypes = [ctypes.c_char_p]
-        res = self.pts_library.InitStackEx(self.profile_under_test.encode())
-        self.log.info("Stack has been initialized with result {0:s}".format(
-            str(res)))
-
-        # Select to receive Log messages after test is done
-        self.pts_library.SetPostLoggingEx.argtypes = [
-            ctypes.c_bool, ctypes.c_char_p
-        ]
-        self.pts_library.SetPostLoggingEx(True,
-                                          self.profile_under_test.encode())
-
-        # Clear Bluetooth Protocol Viewer. Dongle message callback will update
-        # sniffer_ready automatically. No need to fail setup if the timeout
-        # is exceeded since the logs will still be available just not starting
-        # from a clean slate. Just post a warning.
-        self.sniffer_ready = False
-        self.pts_library.SnifferClearEx()
-        end_time = time.time() + 10
-        while not self.sniffer_ready and time.time() < end_time:
-            time.sleep(1)
-        if not self.sniffer_ready:
-            self.log.warning("Sniffer not cleared. Continuing.")
-
-    def is_sniffer_running(self):
-        """ Looks for running Bluetooth Protocol Viewer process
-
-        Returns:
-            Returns True if finds one, False otherwise.
-        """
-        prog = [
-            line.split()
-            for line in subprocess.check_output("tasklist").splitlines()
-        ]
-        [prog.pop(e) for e in [0, 1, 2]]
-        for task in prog:
-            task_name = task[0].decode("utf-8")
-            if task_name == "Fts.exe":
-                self.log.info("Found FTS process successfully.")
-                # Sleep recommended by PTS.
-                time.sleep(1)
-                return True
-        return False
-
-    def UseAutoImplicitSend(self):
-        """Callback method that defines Which ImplicitSend will be used.
-
-        Returns:
-            True always to inform PTS to use the local implementation.
-        """
-        return True
-
-    def DongleMsg(self, msg_str):
-        """ Receives PTS dongle messages.
-
-        Specifically this receives the Bluetooth Protocol Viewer completed
-        save/clear operations.
-
-        Returns:
-            True if sniffer is ready, False otherwise.
-        """
-        msg = (ctypes.c_char_p(msg_str).value).decode("utf-8")
-        self.log.info(msg)
-        # Sleep recommended by PTS.
-        time.sleep(1)
-        if SNIFFER_READY in msg:
-            self.sniffer_ready = True
-        return True
-
-    def DeviceSearchMsg(self, addr_str, name_str, cod_str):
-        """ Receives device search messages
-
-        Each device may return multiple messages
-        Each message will contain device address and may contain device name and
-        COD.
-
-        Returns:
-            True always and reports to the callback appropriately.
-        """
-        addr = (ctypes.c_char_p(addr_str).value).replace(b'\xed',
-                                                         b' ').decode("utf-8")
-        name = (ctypes.c_char_p(name_str).value).replace(b'\xed',
-                                                         b' ').decode("utf-8")
-        cod = (ctypes.c_char_p(cod_str).value).replace(b'\xed',
-                                                       b' ').decode("utf-8")
-        self.devices.append(
-            "Device address = {0:s} name = {1:s} cod = {2:s}".format(
-                addr, name, cod))
-        return True
-
-    def Log(self, log_time_str, log_descr_str, log_msg_str, log_type, project):
-        """ Receives PTS log messages.
-
-        Returns:
-            True always and reports to the callback appropriately.
-        """
-        log_time = (ctypes.c_char_p(log_time_str).value).decode("utf-8")
-        log_descr = (ctypes.c_char_p(log_descr_str).value).decode("utf-8")
-        log_msg = (ctypes.c_char_p(log_msg_str).value).decode("utf-8")
-        if "Verdict Description" in log_descr:
-            self.xml_pts_running_summary.text += "\t- {}".format(log_msg)
-        if "Final Verdict" in log_descr:
-            self.xml_pts_running_summary.text += "{}{}\n".format(
-                log_descr.strip(), log_msg.strip())
-        full_log_msg = "{}{}{}".format(log_time, log_descr, log_msg)
-        self.xml_pts_running_log.text += "{}\n".format(str(full_log_msg))
-
-        if ctypes.c_int(log_type).value == LOG_TYPE_FINAL_VERDICT:
-            indx = log_msg.find(VERDICT)
-            if indx == 0:
-                if self.pts_test_result == VERDICT_STRINGS['RESULT_INCOMP']:
-                    if VERDICT_STRINGS['RESULT_INCONC'] in log_msg:
-                        self.pts_test_result = VERDICT_STRINGS['RESULT_INCONC']
-                    elif VERDICT_STRINGS['RESULT_FAIL'] in log_msg:
-                        self.pts_test_result = VERDICT_STRINGS['RESULT_FAIL']
-                    elif VERDICT_STRINGS['RESULT_PASS'] in log_msg:
-                        self.pts_test_result = VERDICT_STRINGS['RESULT_PASS']
-                    elif VERDICT_STRINGS['RESULT_NONE'] in log_msg:
-                        self.pts_test_result = VERDICT_STRINGS['RESULT_NONE']
-        return True
-
-    def ImplicitSend(self, description, style):
-        """ ImplicitSend callback
-
-        Implicit Send Styles:
-            MMI_Style_Ok_Cancel1 =     0x11041, Simple prompt           | OK, Cancel buttons      | Default: OK
-            MMI_Style_Ok_Cancel2 =     0x11141, Simple prompt           | Cancel button           | Default: Cancel
-            MMI_Style_Ok1 =            0x11040, Simple prompt           | OK button               | Default: OK
-            MMI_Style_Yes_No1 =        0x11044, Simple prompt           | Yes, No buttons         | Default: Yes
-            MMI_Style_Yes_No_Cancel1 = 0x11043, Simple prompt           | Yes, No buttons         | Default: Yes
-            MMI_Style_Abort_Retry1 =   0x11042, Simple prompt           | Abort, Retry buttons    | Default: Abort
-            MMI_Style_Edit1 =          0x12040, Request for data input  | OK, Cancel buttons      | Default: OK
-            MMI_Style_Edit2 =          0x12140, Select item from a list | OK, Cancel buttons      | Default: OK
-
-        Handling
-            MMI_Style_Ok_Cancel1
-                OK = return "OK"
-                Cancel = return 0
-
-            MMI_Style_Ok_Cancel2
-                OK = return "OK"
-                Cancel = return 0
-
-            MMI_Style_Ok1
-                OK = return "OK", this version should not return 0
-
-            MMI_Style_Yes_No1
-                Yes = return "OK"
-                No = return 0
-
-            MMI_Style_Yes_No_Cancel1
-                Yes = return "OK"
-                No = return 0
-                Cancel = has been deprecated
-
-            MMI_Style_Abort_Retry1
-                Abort = return 0
-                Retry = return "OK"
-
-            MMI_Style_Edit1
-                OK = return expected string
-                Cancel = return 0
-
-            MMI_Style_Edit2
-                OK = return expected string
-                Cancel = return 0
-
-        Receives ImplicitSend messages
-        Description format is as following:
-        {MMI_ID,Test Name,Layer Name}MMI Action\n\nDescription: MMI Description
-        """
-        descr_str = (ctypes.c_char_p(description).value).decode("utf-8")
-        # Sleep recommended by PTS.
-        time.sleep(1)
-        indx = descr_str.find('}')
-        implicit_send_info = descr_str[1:(indx)]
-        self.current_implicit_send_description = descr_str[(indx + 1):]
-        items = implicit_send_info.split(',')
-        implicit_send_info_id = items[0]
-        implicit_send_info_test_case = items[1]
-        self.pts_profile_mmi_request = items[2]
-        self.log.info(
-            "OnImplicitSend() has been called with the following parameters:\n"
-        )
-        self.log.info("\t\tproject_name = {0:s}".format(
-            self.pts_profile_mmi_request))
-        self.log.info("\t\tid = {0:s}".format(implicit_send_info_id))
-        self.log.info(
-            "\t\ttest_case = {0:s}".format(implicit_send_info_test_case))
-        self.log.info("\t\tdescription = {0:s}".format(
-            self.current_implicit_send_description))
-        self.log.info("\t\tstyle = {0:#X}".format(ctypes.c_int(style).value))
-        self.log.info("")
-        try:
-            self.next_action = int(implicit_send_info_id)
-        except Exception as err:
-            self.log.error(
-                "Setting verdict to RESULT_FAIL, exception found: {}".format(
-                    err))
-            self.pts_test_result = VERDICT_STRINGS['RESULT_FAIL']
-        res = b'OK'
-        if len(self.extra_answers) > 0:
-            res = self.extra_answers.pop(0).encode()
-        self.log.info("Sending Response: {}".format(res))
-        return res
-
-    def log_results(self, test_name):
-        """Log results.
-
-        Saves the sniffer results in cfa format and clears the sniffer.
-
-        Args:
-            test_name: string, name of the test run.
-        """
-        self.pts_library.SnifferCanSaveEx.restype = ctypes.c_bool
-        canSave = ctypes.c_bool(self.pts_library.SnifferCanSaveEx()).value
-        self.pts_library.SnifferCanSaveAndClearEx.restype = ctypes.c_bool
-        canSaveClear = ctypes.c_bool(
-            self.pts_library.SnifferCanSaveAndClearEx()).value
-        file_name = "\\{}.cfa".format(self.test_log_prefix).encode()
-        path = self.test_log_directory.encode() + file_name
-
-        if canSave == True:
-            self.pts_library.SnifferSaveEx.argtypes = [ctypes.c_char_p]
-            self.pts_library.SnifferSaveEx(path)
-        else:
-            self.pts_library.SnifferSaveAndClearEx.argtypes = [ctypes.c_char_p]
-            self.pts_library.SnifferSaveAndClearEx(path)
-        end_time = time.time() + 60
-        while self.sniffer_ready == False and end_time > time.time():
-            self.log.info("Waiting for sniffer to be ready...")
-            time.sleep(1)
-        if self.sniffer_ready == False:
-            raise BluetoothPtsSnifferError(
-                "Sniffer not ready after 60 seconds.")
-
-    def execute_test(self, test_name, test_timeout=60):
-        """Execute the input test name.
-
-        Preps PTS to run the test and waits up to 2 minutes for all steps
-        in the execution to finish. Cleanup of PTS related objects follows
-        any test verdict.
-
-        Args:
-            test_name: string, name of the test to execute.
-        """
-        today = datetime.now()
-        self.write_xml_pts_pixit_values_for_current_test()
-        # TODO: Find out how to grab the PTS version. Temporarily
-        # hardcoded to v.7.4.1.2.
-        self.xml_pts_pixit.text = (
-            "Test Case Started: {} v.7.4.1.2, {} started on {}\n\n{}".format(
-                self.profile_under_test, test_name,
-                today.strftime("%A, %B %d, %Y, %H:%M:%S"),
-                self.xml_pts_pixit.text))
-
-        self.xml_pts_running_summary.text += "Test case : {} started\n".format(
-            test_name)
-        log_time_formatted = "{:%Y_%m_%d_%H_%M_%S}".format(datetime.now())
-        formatted_test_name = test_name.replace('/', '_')
-        formatted_test_name = formatted_test_name.replace('-', '_')
-        self.test_log_prefix = "{}_{}".format(formatted_test_name,
-                                              log_time_formatted)
-        self.test_log_directory = "{}\\{}\\{}".format(self.log_directory,
-                                                      self.profile_under_test,
-                                                      self.test_log_prefix)
-        os.makedirs(self.test_log_directory)
-        curr_test = test_name.encode()
-
-        self.pts_library.StartTestCaseEx.argtypes = [
-            ctypes.c_char_p, ctypes.c_char_p, ctypes.c_bool
-        ]
-        res = self.pts_library.StartTestCaseEx(
-            curr_test, self.profile_under_test.encode(), True)
-        self.log.info("Test has been started with result {0:s}".format(
-            str(res)))
-
-        # Wait till verdict is received
-        self.log.info("Begin Test Execution... waiting for verdict.")
-        end_time = time.time() + test_timeout
-        while self.pts_test_result == VERDICT_STRINGS[
-                'RESULT_INCOMP'] and time.time() < end_time:
-            time.sleep(1)
-        self.log.info("End Test Execution... Verdict {}".format(
-            self.pts_test_result))
-
-        # Clean up after test is done
-        self.pts_library.TestCaseFinishedEx.argtypes = [
-            ctypes.c_char_p, ctypes.c_char_p
-        ]
-        res = self.pts_library.TestCaseFinishedEx(
-            curr_test, self.profile_under_test.encode())
-
-        self.log_results(test_name)
-        self.xml_pts_running_summary.text += "{} finished\n".format(test_name)
-        # Add the log results to the XML output
-        self.xml_root.append(self.xml_pts_pixit)
-        self.xml_root.append(self.xml_pts_running_log)
-        self.xml_root.append(self.xml_pts_running_summary)
-        rough_string = ET.tostring(self.xml_root,
-                                   encoding='utf-8',
-                                   method='xml')
-        reparsed = minidom.parseString(rough_string)
-        with open(
-                "{}\\{}.xml".format(self.test_log_directory,
-                                    self.test_log_prefix), "w") as writter:
-            writter.write(
-                reparsed.toprettyxml(indent="  ", encoding="utf-8").decode())
-
-        if self.pts_test_result is VERDICT_STRINGS['RESULT_PASS']:
-            return True
-        return False
-
-    """Observer functions"""
-
-    def bind_to(self, callback):
-        """ Callbacks to add to the observer.
-        This is used for DUTS automatic responses (ImplicitSends local
-        implementation).
-        """
-        self._observers.append(callback)
-
-    @property
-    def next_action(self):
-        return self._next_action
-
-    @next_action.setter
-    def next_action(self, action):
-        self._next_action = action
-        for callback in self._observers:
-            callback(self._next_action)
-
-    """End Observer functions"""
diff --git a/src/antlion/controllers/buds_controller.py b/src/antlion/controllers/buds_controller.py
deleted file mode 100644
index 0040d85..0000000
--- a/src/antlion/controllers/buds_controller.py
+++ /dev/null
@@ -1,83 +0,0 @@
-#!/usr/bin/env python3.4
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""This is the controller module for Pixel Buds devices.
-
-For the device definition, see buds_lib.apollo_lib.
-"""
-
-from antlion.controllers.buds_lib.apollo_lib import ParentDevice
-
-
-MOBLY_CONTROLLER_CONFIG_NAME = 'BudsDevice'
-ACTS_CONTROLLER_REFERENCE_NAME = 'buds_devices'
-
-
-class ConfigError(Exception):
-    """Raised when the configuration is malformatted."""
-
-
-def create(configs):
-    """Creates a Pixel Buds device for each config found within the configs.
-
-    Args:
-        configs: The configs can be structured in the following ways:
-
-                    ['serial1', 'serial2', ... ]
-
-                    [
-                        {
-                            'serial': 'serial1',
-                            'label': 'some_info',
-                            ...
-                        },
-                        {
-                            'serial': 'serial2',
-                            'label': 'other_info',
-                            ...
-                        }
-                    ]
-    """
-    created_controllers = []
-
-    if not isinstance(configs, list):
-        raise ConfigError('Malformatted config %s. Must be a list.' % configs)
-
-    for config in configs:
-        if isinstance(config, str):
-            created_controllers.append(ParentDevice(config))
-        elif isinstance(config, dict):
-            serial = config.get('serial', None)
-            if not serial:
-                raise ConfigError('Buds Device %s is missing entry "serial".' %
-                                  config)
-            created_controllers.append(ParentDevice(serial))
-        else:
-            raise ConfigError('Malformatted config: "%s". Must be a string or '
-                              'dict' % config)
-    return created_controllers
-
-
-def destroy(buds_device_list):
-    pass
-
-
-def get_info(buds_device_list):
-    device_infos = []
-    for buds_device in buds_device_list:
-        device_infos.append({'serial': buds_device.serial_number,
-                             'name': buds_device.device_name})
-    return device_infos
diff --git a/src/antlion/controllers/buds_lib/__init__.py b/src/antlion/controllers/buds_lib/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/controllers/buds_lib/__init__.py
+++ /dev/null
diff --git a/src/antlion/controllers/buds_lib/apollo_lib.py b/src/antlion/controllers/buds_lib/apollo_lib.py
deleted file mode 100644
index 1a63c62..0000000
--- a/src/antlion/controllers/buds_lib/apollo_lib.py
+++ /dev/null
@@ -1,1514 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Apollo Commander through USB/UART interface.
-
-It uses python serial lib to communicate to a Apollo device.
-Some of the commander may not work yet, pending on the final version of the
-commander implementation.
-
-Typical usage examples:
-
-    To get a list of all apollo devices:
-    >>> devices = apollo_lib.get_devices()
-
-    To work with a specific apollo device:
-    >>> apollo = apollo_lib.Device(serial_number='ABCDEF0123456789',
-    >>> commander_port='/dev/ttyACM0')
-
-    To send a single command:
-    >>> apollo.cmd('PowOff')
-
-    To send a list of commands:
-    >>> apollo.cmd(['PowOff', 'PowOn', 'VolUp', 'VolDown']
-"""
-import atexit
-import os
-import re
-import subprocess
-import time
-from logging import Logger
-
-import serial
-from tenacity import retry, stop_after_attempt, wait_exponential
-
-from antlion.controllers.buds_lib import tako_trace_logger
-from antlion.controllers.buds_lib import logserial
-from antlion.controllers.buds_lib.b29_lib import B29Device
-from antlion.controllers.buds_lib.dev_utils import apollo_log_decoder
-from antlion.controllers.buds_lib.dev_utils import apollo_log_regex
-from antlion.controllers.buds_lib.dev_utils import apollo_sink_events
-
-logging = tako_trace_logger.TakoTraceLogger(Logger('apollo'))
-
-BAUD_RATE = 115200
-BYTE_SIZE = 8
-PARITY = 'N'
-STOP_BITS = 1
-DEFAULT_TIMEOUT = 3
-WRITE_TO_FLASH_WAIT = 30  # wait 30 sec when writing to external flash.
-LOG_REGEX = re.compile(r'(?P<time_stamp>\d+)\s(?P<msg>.*)')
-STATUS_REGEX = r'(?P<time_stamp>\d+)\s(?P<key>.+?): (?P<value>.+)'
-APOLLO_CHIP = '_Apollo_'
-DEVICE_REGEX = (
-    r'_(?P<device_serial>[A-Z0-9]+)-(?P<interface>\w+)'
-    r'\s->\s(\.\./){2}(?P<port>\w+)'
-)
-OTA_VERIFICATION_FAILED = 'OTA verification failed. corrupt image?'
-OTA_ERASING_PARTITION = 'INFO OTA eras ptns'
-OTA_RECEIVE_CSR_REGEX = r'INFO OTA CSR rcv begin'
-CODEC_REGEX = r'(?P<time_stamp>\d+)\s(?P<codec>\w+) codec is used.'
-BUILD_REGEX = r'\d+\.\d+\.(?P<build>\d+)-?(?P<psoc_build>\d*)-?(?P<debug>\w*)'
-
-
-class Error(Exception):
-    """Module Level Error."""
-
-
-class ResponseError(Error):
-    """cmd Response Error."""
-
-
-class DeviceError(Error):
-    """Device Error."""
-
-
-class ConnectError(Error):
-    """Connection Error."""
-
-
-def get_devices():
-    """Get all available Apollo devices.
-
-    Returns:
-        (list) A list of available devices or empty list if none found
-
-    Raises:
-        Error: raises Error if no Apollo devices or wrong interfaces were found.
-    """
-    devices = []
-    result = os.popen('ls -l /dev/serial/by-id/*%s*' % APOLLO_CHIP).read()
-    if not result:
-        raise Error('No Apollo Devices found.')
-    for line in result.splitlines():
-        match = re.search(DEVICE_REGEX, line)
-        interface = match.group('interface')
-        # TODO: The commander port will always be None.
-        commander_port = None
-        if interface == 'if00':
-            commander_port = '/dev/' + match.group('port')
-            continue
-        elif interface == 'if02':
-            log_port = '/dev/' + match.group('port')
-        else:
-            raise Error('Wrong interface found.')
-        device_serial = match.group('device_serial')
-
-        device = {
-            'commander_port': commander_port,
-            'log_port': log_port,
-            'serial_number': device_serial
-        }
-        devices.append(device)
-    return devices
-
-
-class BudsDevice(object):
-    """Provides a simple class to interact with Apollo."""
-
-    def __init__(self, serial_number, commander_port=None, log_port=None,
-                 serial_logger=None):
-        """Establish a connection to a Apollo.
-
-        Open a connection to a device with a specific serial number.
-
-        Raises:
-            ConnectError: raises ConnectError if cannot open the device.
-        """
-        self.set_log = False
-        self.connection_handle = None
-        self.device_closed = False
-        if serial_logger:
-            self.set_logger(serial_logger)
-        self.pc = logserial.PortCheck()
-        self.serial_number = serial_number
-        # TODO (kselvakumaran): move this to an interface device class that
-        # apollo_lib.BudsDevice should derive from
-        if not commander_port and not log_port:
-            self.get_device_ports(self.serial_number)
-        if commander_port:
-            self.commander_port = commander_port
-        if log_port:
-            self.log_port = log_port
-        self.apollo_log = None
-        self.cmd_log = None
-        self.apollo_log_regex = apollo_log_regex
-        self.dut_type = 'apollo'
-
-        # TODO (kselvakumaran): move this to an interface device class that
-        # apollo_lib.BudsDevice should derive from
-
-        try:  # Try to open the device
-            self.connection_handle = logserial.LogSerial(
-                self.commander_port, BAUD_RATE, flush_output=False,
-                serial_logger=logging)
-            self.wait_for_commander()
-        except (serial.SerialException, AssertionError, ConnectError) as e:
-            logging.error(
-                'error opening device {}: {}'.format(serial_number, e))
-            raise ConnectError('Error open the device.')
-        # disable sleep on idle
-        self.stay_connected_state = 1
-        atexit.register(self.close)
-
-    def set_logger(self, serial_logger):
-        global logging
-        logging = serial_logger
-        self.set_log = True
-        if self.connection_handle:
-            self.connection_handle.set_logger(serial_logger)
-
-    def get_device_ports(self, serial_number):
-        commander_query = {'ID_SERIAL_SHORT': serial_number,
-                           'ID_USB_INTERFACE_NUM': '00'}
-        log_query = {'ID_SERIAL_SHORT': serial_number,
-                     'ID_USB_INTERFACE_NUM': '02'}
-        self.commander_port = self.pc.search_port_by_property(commander_query)
-        self.log_port = self.pc.search_port_by_property(log_query)
-        if not self.commander_port and not self.log_port:
-            raise ConnectError(
-                'BudsDevice serial number %s not found' % serial_number)
-        else:
-            if not self.commander_port:
-                raise ConnectError('No devices found')
-            self.commander_port = self.commander_port[0]
-            self.log_port = self.log_port[0]
-
-    def get_all_log(self):
-        return self.connection_handle.get_all_log()
-
-    def query_log(self, from_timestamp, to_timestamp):
-        return self.connection_handle.query_serial_log(
-            from_timestamp=from_timestamp, to_timestamp=to_timestamp)
-
-    def send(self, cmd):
-        """Sends the command to serial port.
-
-        It does not care about whether the cmd is successful or not.
-
-        Args:
-            cmd: The passed command
-
-        Returns:
-            The number of characters written
-        """
-        logging.debug(cmd)
-        # with self._lock:
-        self.connection_handle.write(cmd)
-        result = self.connection_handle.read()
-        return result
-
-    def cmd(self, cmds, wait=None):
-        """Sends the commands and check responses.
-
-        Valid cmd will return something like '585857269 running cmd VolUp'.
-        Invalid cmd will log an error and return something like '585826369 No
-        command vol exists'.
-
-        Args:
-            cmds: The commands to the commander.
-            wait: wait in seconds for the cmd response.
-
-        Returns:
-            (list) The second element of the array returned by _cmd.
-        """
-        if isinstance(cmds, str):
-            cmds = [cmds]
-        results = []
-        for cmd in cmds:
-            _, result = self._cmd(cmd, wait=wait)
-            results.append(result)
-        return results
-
-    def _cmd(self, cmd, wait=None, throw_error=True):
-        """Sends a single command and check responses.
-
-        Valid cmd will return something like '585857269 running cmd VolUp'.
-        Invalid cmd will log an error and return something like '585826369 No
-        command vol exists'. Some cmd will return multiple lines of output.
-        eg. 'menu'.
-
-        Args:
-            cmd: The command to the commander.
-            wait: wait in seconds for the cmd response.
-            throw_error: Throw exception on True
-
-        Returns:
-            (list) containing such as the following:
-            [<return value>, [<protobuf dictionary>, str]]
-            Hex strings (protobuf) are replaced by its decoded dictionaries
-            and stored in an arry along with other string returned fom the
-            device.
-
-        Raises:
-            DeviceError: On Error.(Optional)
-        """
-        self.connection_handle.write(cmd)
-
-        while self.connection_handle.is_logging:
-            time.sleep(.01)
-        if wait:
-            self.wait(wait)
-        # Using read_serial_port as readlines is a blocking call until idle.
-        res = self.read_serial_port()
-        result = []
-        self.cmd_log = res
-        command_resv = False
-        # TODO: Cleanup the usage of the two booleans below.
-        command_finish = False
-        command_rejected = False
-        # for line in iter_res:
-        for line in res:
-            if isinstance(line, dict):
-                if 'COMMANDER_RECV_COMMAND' in line.values():
-                    command_resv = True
-                elif 'COMMANDER_REJECT_COMMAND' in line.values():
-                    logging.info('Command rejected')
-                    command_rejected = True
-                    break
-                elif 'COMMANDER_FINISH_COMMAND' in line.values():
-                    command_finish = True
-                    break
-                elif (command_resv and not command_finish and
-                      not command_rejected):
-                    result.append(line)
-            # TODO(jesussalinas): Remove when only encoded lines are required
-            elif command_resv and not command_finish and not command_rejected:
-                if 'running cmd' not in line:
-                    result.append(line)
-        success = True
-        if command_rejected or not command_resv:
-            success = False
-            if throw_error:
-                logging.info(res)
-                raise DeviceError('Unknown command %s' % cmd)
-        return success, result
-
-    def get_pdl(self):
-        """Returns the PDL stack dictionary.
-
-        The PDL stack stores paired devices of Apollo. Each PDL entry include
-        mac_address, flags, link_key, priority fields.
-
-        Returns:
-            list of pdl dicts.
-        """
-        # Get the mask from CONNLIB41:
-        # CONNLIB41 typically looks something like this: 2403 fff1
-        # 2403 fff1 is actually two 16-bit words of a 32-bit integer
-        # like 0xfff12403 . This tells the chronological order of the entries
-        # in the paired device list one nibble each. LSB to MSB corresponds to
-        # CONNLIB42 through CONNLIB49. So, the above tells us that the device at
-        # 0x2638 is the 3rd most recent entry 0x2639 the latest entry etc. As
-        # a device re-pairs the masks are updated.
-        response = []
-        mask = 'ffffffff'
-        res = self.cmd('GetPSHex 0x2637')
-        if len(res[0]) == 0:
-            logging.warning('Error reading PDL mask @ 0x2637')
-            return response
-        else:
-            regexp = r'\d+\s+(?P<m1>....)\s(?P<m2>....)'
-            match = re.match(regexp, res[0][0])
-            if match:
-                connlib41 = match.group('m2') + match.group('m1')
-                mask = connlib41[::-1]
-                logging.debug('PDL mask: %s' % mask)
-
-        # Now get the MAC/link key
-        mask_idx = 0
-        for i in range(9784, 9883):
-            types = {}
-            res = self.cmd('GetPSHex ' + '%0.2x' % i)
-            if len(res[0]) == 0:
-                break
-            else:
-                regexp = ('\d+\s+(?P<Mac>....\s....\s....)\s'
-                          '(?P<Flags>....\s....)\s(?P<Linkkey>.*)')
-                match = re.search(regexp, res[0][0])
-                if match:
-                    mac_address = match.group('Mac').replace(' ', '').upper()
-                    formatted_mac = ''
-                    for i in range(len(mac_address)):
-                        formatted_mac += mac_address[i]
-                        if i % 2 != 0 and i < (len(mac_address) - 1):
-                            formatted_mac += ':'
-                    types['mac_address'] = formatted_mac
-                    types['flags'] = match.group('Flags').replace(' ', '')
-                    types['link_key'] = match.group('Linkkey').replace(' ', '')
-                    types['priority'] = int(mask[mask_idx], 16)
-                    mask_idx += 1
-                    response.append(types)
-
-        return response
-
-    def set_pairing_mode(self):
-        """Enter Bluetooth Pairing mode."""
-        logging.debug('Inside set_pairing_mode()...')
-        try:
-            return self.cmd('Pair')
-        except DeviceError:
-            logging.exception('Pair cmd failed')
-
-    # TODO (kselvakumaran): move this to an interface BT class that
-    # apollo_lib.BudsDevice should derive from
-    def turn_on_bluetooth(self):
-        return True
-
-    # TODO (kselvakumaran): move this to an interface BT class that
-    # apollo_lib.BudsDevice should derive from
-    def is_bt_enabled(self):
-        """Check if BT is enabled.
-
-        (TODO:weisu)Currently it is always true since there is no way to disable
-        BT in apollo
-
-        Returns:
-            True if BT is enabled.
-        """
-        logging.debug('Inside is_bt_enabled()...')
-        return True
-
-    def panic(self):
-        """Hitting a panic, device will be automatically reset after 5s."""
-        logging.debug('Inside panic()...')
-        try:
-            self.send('panic')
-        except serial.SerialException:
-            logging.exception('panic cmd failed')
-
-    def power(self, cmd):
-        """Controls the power state of the device.
-
-        Args:
-            cmd: If 'Off', powers the device off. Otherwise, powers the device
-                 on.
-        """
-        logging.debug('Inside power({})...'.format(cmd))
-        mode = '0' if cmd == 'Off' else '1'
-        cmd = 'Pow ' + mode
-        try:
-            return self.cmd(cmd)
-        except DeviceError:
-            logging.exception('{} cmd failed'.format(cmd))
-
-    def charge(self, state):
-        """Charging Control of the device.
-
-        Args:
-          state: '1/0' to enable/disable charging.
-        """
-        logging.debug('Inside charge({})...'.format(state))
-        cmd = 'chg ' + state
-        try:
-            self.cmd(cmd)
-        except DeviceError:
-            logging.exception('{} cmd failed'.format(cmd))
-
-    def get_battery_level(self):
-        """Get the battery charge level.
-
-        Returns:
-            charge percentage string.
-
-        Raises:
-            DeviceError: GetBatt response error.
-        """
-        response = self.cmd('GetBatt')
-        for line in response[0]:
-            if line.find('Batt:') > -1:
-                # Response if in this format '<messageID> Batt: <percentage>'
-                return line.split()[2]
-        raise DeviceError('Battery Level not found in GetBatt response')
-
-    def get_gas_gauge_current(self):
-        """Get the Gauge current value.
-
-        Returns:
-            Float value with the info
-
-        Raises:
-            DeviceError: I2CRead response error.
-        """
-        response = self.cmd('I2CRead 2 0x29')
-        for line in response[0]:
-            if line.find('value') > -1:
-                return float.fromhex(line.split()[6].replace(',', ''))
-        raise DeviceError('Current Level not found in I2CRead response')
-
-    def get_gas_gauge_voltage(self):
-        """Get the Gauge voltage value.
-
-        Returns:
-            Float value with the info
-
-        Raises:
-            DeviceError: I2CRead response error.
-        """
-        response = self.cmd('I2CRead 2 0x2A')
-        for line in response[0]:
-            if line.find('value') > -1:
-                return float.fromhex(line.split()[6].replace(',', ''))
-        raise DeviceError('Voltage Level not found in I2CRead response')
-
-    def reset(self, wait=5):
-        """Resetting the device."""
-        logging.debug('Inside reset()...')
-        self.power('Off')
-        self.wait(wait)
-        self.power('On')
-
-    def close(self):
-        if not self.device_closed:
-            self.connection_handle.close()
-            self.device_closed = True
-            if not self.set_log:
-                logging.flush_log()
-
-    def get_serial_log(self):
-        """Retrieve the logs from connection handle."""
-        return self.connection_handle.get_all_log()
-
-    def factory_reset(self):
-        """Erase paired device(s) (bond) data and reboot device."""
-        cmd = 'FactoryReset 1'
-        self.send(cmd)
-        self.wait(5)
-        self.reconnect()
-
-    def reboot(self, reconnect=10, retry_timer=30):
-        """Rebooting the device.
-
-        Args:
-            reconnect: reconnect attempts after reboot, None for no reconnect.
-            retry_timer: wait time in seconds before next connect retry.
-
-        Returns:
-            True if successfully reboot or reconnect.
-        """
-        logging.debug('Inside reboot()...')
-        self.panic()
-        if not reconnect:
-            return True
-        ini_time = time.time()
-        message = 'waiting for {} to shutdown'.format(self.serial_number)
-        logging.info(message)
-        while True:
-            alive = self.connection_handle.is_port_alive()
-            if not alive:
-                logging.info('rebooted')
-                break
-            if time.time() - ini_time > 60:
-                logging.info('Shutdown timeouted')
-                break
-            time.sleep(0.5)
-        return self.reconnect(reconnect, retry_timer)
-
-    def reconnect(self, iterations=30, retry_timer=20):
-        """Reconnect to the device.
-
-        Args:
-            iterations: Number of retry iterations.
-            retry_timer: wait time in seconds before next connect retry.
-
-        Returns:
-            True if reconnect to the device successfully.
-
-        Raises:
-            DeviceError: Failed to reconnect.
-        """
-        logging.debug('Inside reconnect()...')
-        for i in range(iterations):
-            try:
-                # port might be changed, refresh the port list.
-                self.get_device_ports(self.serial_number)
-                message = 'commander_port: {}, log_port: {}'.format(
-                    self.commander_port, self.log_port)
-                logging.info(message)
-                self.connection_handle.refresh_port_connection(
-                    self.commander_port)
-                # Sometimes there might be sfome delay when commander is
-                # functioning.
-                self.wait_for_commander()
-                return True
-            except Exception as e:  # pylint: disable=broad-except
-                message = 'Fail to connect {} times due to {}'.format(
-                    i + 1, e)
-                logging.warning(message)
-                # self.close()
-                time.sleep(retry_timer)
-        raise DeviceError('Cannot reconnect to %s with %d attempts.',
-                          self.commander_port, iterations)
-
-    @retry(stop=stop_after_attempt(4),
-           wait=wait_exponential())
-    def wait_for_commander(self):
-        """Wait for commander to function.
-
-        Returns:
-            True if commander worked.
-
-        Raises:
-            DeviceError: Failed to bring up commander.
-        """
-        # self.Flush()
-        result = self.cmd('menu')
-        if result:
-            return True
-        else:
-            raise DeviceError('Cannot start commander.')
-
-    def wait(self, timeout=1):
-        """Wait for the device."""
-        logging.debug('Inside wait()...')
-        time.sleep(timeout)
-
-    def led(self, cmd):
-        """LED control of the device."""
-        message = 'Inside led({})...'.format(cmd)
-        logging.debug(message)
-        cmd = 'EventUsrLeds' + cmd
-        try:
-            return self.cmd(_evt_hex(cmd))
-        except DeviceError:
-            logging.exception('LED cmd failed')
-
-    def volume(self, key, times=1):
-        """Volume Control. (Down/Up).
-
-        Args:
-            key: Down --Decrease a volume.
-                 Up --Increase a volume.
-            times: Simulate number of swipes.
-
-        Returns:
-            (int) Volume level.
-
-        Raises:
-            DeviceError
-        """
-        message = 'Inside volume({}, {})...'.format(key, times)
-        logging.debug(message)
-        updown = {
-            'Up': '1',
-            'Down': '0',
-        }
-        cmds = ['ButtonSwipe ' + updown[key]] * times
-        logging.info(cmds)
-        try:
-            self.cmd(cmds)
-            for line in self.cmd_log:
-                if isinstance(line, dict):
-                    if 'id' in line and line['id'] == 'VOLUME_CHANGE':
-                        if 'data' in line and line['data']:
-                            return int(line['data'])
-        except DeviceError:
-            logging.exception('ButtonSwipe cmd failed')
-
-    def menu(self):
-        """Return a list of supported commands."""
-        logging.debug('Inside menu()...')
-        try:
-            return self.cmd('menu')
-        except DeviceError:
-            logging.exception('menu cmd failed')
-
-    def set_ohd(self, mode='AUTO'):
-        """Manually set the OHD status and override auto-detection.
-
-        Args:
-            mode: ON --OHD manual mode with on-ear state.
-                  OFF --OHD manual mode with off-ear state.
-                  AUTO --OHD auto-detection mode.
-        Raises:
-            DeviceError: OHD Command failure.
-        """
-        logging.debug('Inside set_ohd()...')
-        try:
-            if mode != 'AUTO':
-                # Set up OHD manual mode
-                self.cmd('Test 14 0 2 1')
-                if mode == 'ON':
-                    # Detects on-ear
-                    self.cmd('Test 14 0 2 1 0x3')
-                else:
-                    # Detects off-ear
-                    self.cmd('Test 14 0 2 1 0x0')
-            else:
-                # Default mode (auto detect.)
-                self.cmd('Test 14 0 2 0')
-        except DeviceError:
-            logging.exception('OHD cmd failed')
-
-    def music_control_events(self, cmd, regexp=None, wait=.5):
-        """Sends the EvtHex to control media player.
-
-        Arguments:
-            cmd: the command to perform.
-            regexp: Optional pattern to validate the event logs.
-
-        Returns:
-            Boolean: True if the command triggers the correct events on the
-                     device, False otherwise.
-
-        # TODO(nviboonchan:) Add more supported commands.
-        Supported commands:
-            'PlayPause'
-            'VolumeUp'
-            'VolumeDown',
-        """
-        cmd_regexp = {
-            # Play/ Pause would need to pass the regexp argument since it's
-            # sending the same event but returns different responses depending
-            # on the device state.
-            'VolumeUp': apollo_log_regex.VOLUP_REGEX,
-            'VolumeDown': apollo_log_regex.VOLDOWN_REGEX,
-        }
-        if not regexp:
-            if cmd not in cmd_regexp:
-                logmsg = 'Expected pattern is not defined for event %s' % cmd
-                logging.exception(logmsg)
-                return False
-            regexp = cmd_regexp[cmd]
-        self.cmd('EvtHex %s' % apollo_sink_events.SINK_EVENTS['EventUsr' + cmd],
-                 wait=wait)
-        for line in self.cmd_log:
-            if isinstance(line, str):
-                if re.search(regexp, line):
-                    return True
-            elif isinstance(line, dict):
-                if line.get('id', None) == 'AVRCP_PLAY_STATUS_CHANGE':
-                    return True
-        return False
-
-    def avrcp(self, cmd):
-        """sends the Audio/Video Remote Control Profile (avrcp) control command.
-
-        Supported commands:
-            'PlayPause'
-            'Stop'
-            'SkipForward',
-            'SkipBackward',
-            'FastForwardPress',
-            'FastForwardRelease',
-            'RewindPress',
-            'RewindRelease',
-            'ShuffleOff',
-            'ShuffleAllTrack',
-            'ShuffleGroup',
-            'RepeatOff':,
-            'RepeatSingleTrack',
-            'RepeatAllTrack',
-            'RepeatGroup',
-            'Play',
-            'Pause',
-            'ToggleActive',
-            'NextGroupPress',
-            'PreviousGroupPress',
-            'NextGroupRelease',
-            'PreviousGroupRelease',
-
-        Args:
-            cmd: The avrcp command.
-
-        """
-        cmd = 'EventUsrAvrcp' + cmd
-        logging.debug(cmd)
-        try:
-            self.cmd(_evt_hex(cmd))
-        except DeviceError:
-            logging.exception('avrcp cmd failed')
-
-    def enable_log(self, levels=None):
-        """Enable specified logs."""
-        logging.debug('Inside enable_log()...')
-        if levels is None:
-            levels = ['ALL']
-        masks = hex(
-            sum([int(apollo_sink_events.LOG_FEATURES[x], 16) for x in levels]))
-        try:
-            self.cmd('LogOff %s' % apollo_sink_events.LOG_FEATURES['ALL'])
-            return self.cmd('LogOn %s' % masks)
-        except DeviceError:
-            logging.exception('Enable log failed')
-
-    def disable_log(self, levels=None):
-        """Disable specified logs."""
-        logging.debug('Inside disable_log()...')
-        if levels is None:
-            levels = ['ALL']
-        masks = hex(
-            sum([int(apollo_sink_events.LOG_FEATURES[x], 16) for x in levels]))
-        try:
-            self.cmd('LogOn %s' % apollo_sink_events.LOG_FEATURES['ALL'])
-            return self.cmd('LogOff %s' % masks)
-        except DeviceError:
-            logging.exception('Disable log failed')
-
-    def write_to_flash(self, file_name=None):
-        """Write file to external flash.
-
-        Note: Assume pv is installed. If not, install it by
-              'apt-get install pv'.
-
-        Args:
-            file_name: Full path file name.
-
-        Returns:
-            Boolean: True if write to partition is successful. False otherwise.
-        """
-        logging.debug('Inside write_to_flash()...')
-        if not os.path.isfile(file_name):
-            message = 'DFU file %s not found.'.format(file_name)
-            logging.exception(message)
-            return False
-        logging.info(
-            'Write file {} to external flash partition ...'.format(file_name))
-        image_size = os.path.getsize(file_name)
-        logging.info('image size is {}'.format(image_size))
-        results = self.cmd('Ota {}'.format(image_size), wait=3)
-        logging.debug('Result of Ota command' + str(results))
-        if any(OTA_VERIFICATION_FAILED in result for result in results[0]):
-            return False
-        # finished cmd Ota
-        if (any('OTA_ERASE_PARTITION' in result.values() for result in
-                results[0] if
-                isinstance(result, dict)) or
-                any('OTA erasd ptns' in result for result in results[0])):
-            try:
-                # -B: buffer size in bytes, -L rate-limit in B/s.
-                subcmd = ('pv --force -B 160 -L 10000 %s > %s' %
-                          (file_name, self.commander_port))
-                logging.info(subcmd)
-                p = subprocess.Popen(subcmd, stdout=subprocess.PIPE, shell=True)
-            except OSError:
-                logging.exception(
-                    'pv not installed, please install by: apt-get install pv')
-                return False
-            try:
-                res = self.read_serial_port(read_until=6)
-            except DeviceError:
-                logging.exception('Unable to read the device port')
-                return False
-            for line in res:
-                if isinstance(line, dict):
-                    logging.info(line)
-                else:
-                    match = re.search(OTA_RECEIVE_CSR_REGEX, line)
-                    if match:
-                        logging.info(
-                            'OTA Image received. Transfer is in progress...')
-                        # Polling during a transfer could miss the final message
-                        # when the device reboots, so we wait until the transfer
-                        # completes.
-                        p.wait()
-                        return True
-            # No image transfer in progress.
-            return False
-        else:
-            return False
-
-    def flash_from_file(self, file_name, reconnect=True):
-        """Upgrade Apollo from an image file.
-
-        Args:
-            file_name: DFU file name. eg. /google/data/ro/teams/wearables/
-                       apollo/ota/master/v76/apollo.dfu
-            reconnect: True to reconnect the device after flashing
-        Returns:
-            Bool: True if the upgrade is successful. False otherwise.
-        """
-        logging.debug('Inside flash_from_file()...')
-        if self.write_to_flash(file_name):
-            logging.info('OTA image transfer is completed')
-            if reconnect:
-                # Transfer is completed; waiting for the device to reboot.
-                logging.info('wait to make sure old connection disappears.')
-                self.wait_for_reset(timeout=150)
-                self.reconnect()
-                logging.info('BudsDevice reboots successfully after OTA.')
-            return True
-
-    def open_mic(self, post_delay=5):
-        """Open Microphone on the device using EvtHex command.
-
-        Args:
-            post_delay: time delay in seconds after the microphone is opened.
-
-        Returns:
-            Returns True or False based on whether the command was executed.
-        """
-        logging.debug('Inside open_mic()...')
-        success, _ = self._cmd('Voicecmd 1', post_delay)
-        return success
-
-    def close_mic(self, post_delay=5):
-        """Close Microphone on the device using EvtHex command.
-
-        Args:
-            post_delay: time delay in seconds after the microphone is closed.
-
-        Returns:
-            Returns true or false based on whether the command was executed.
-        """
-        logging.debug('Inside close_mic()...')
-        success, _ = self._cmd('Voicecmd 0', post_delay)
-        return success
-
-    def touch_key_press_event(self, wait=1):
-        """send key press event command.
-
-        Args:
-            wait: Inject delay after key press to simulate real touch event .
-        """
-        logging.debug('Inside KeyPress()...')
-        self._cmd('Touch 6')
-        self.wait(wait)
-
-    def touch_tap_event(self, wait_if_pause=10):
-        """send key release event after key press to simulate single tap.
-
-        Args:
-            wait_if_pause: Inject delay after avrcp pause was detected.
-
-        Returns:
-            Returns False if avrcp play orp ause not detected else True.
-        """
-        logging.debug('Inside Touch Tap event()...')
-        self._cmd('Touch 4')
-        for line in self.cmd_log:
-            if 'avrcp play' in line:
-                logging.info('avrcp play detected')
-                return True
-            if 'avrcp pause' in line:
-                logging.info('avrcp pause detected')
-                self.wait(wait_if_pause)
-                return True
-        return False
-
-    def touch_hold_up_event(self):
-        """Open Microphone on the device using touch hold up command.
-
-        Returns:
-            Returns True or False based on whether the command was executed.
-        """
-        logging.debug('Inside open_mic()...')
-        self._cmd('Touch 3')
-        for line in self.cmd_log:
-            if 'Button 1 LONG_BEGIN' in line:
-                logging.info('mic open success')
-                return True
-        return False
-
-    def touch_hold_down_event(self):
-        """Close Microphone on the device using touch hold down command.
-
-        Returns:
-            Returns true or false based on whether the command was executed.
-        """
-        logging.debug('Inside close_mic()...')
-        self._cmd('Touch 8')
-        for line in self.cmd_log:
-            if 'Button 1 LONG_END' in line:
-                logging.info('mic close success')
-                return True
-        return False
-
-    def tap(self):
-        """Performs a Tap gesture."""
-        logging.debug('Inside tap()')
-        self.cmd('ButtonTap 0')
-
-    def hold(self, duration):
-        """Tap and hold a button.
-
-        Args:
-            duration: (int) duration in milliseconds.
-        """
-        logging.debug('Inside hold()')
-        self.cmd('ButtonHold ' + str(duration))
-
-    def swipe(self, direction):
-        """Perform a swipe gesture.
-
-        Args:
-            direction: (int) swipe direction 1 forward, 0 backward.
-        """
-        logging.debug('Inside swipe()')
-        self.cmd('ButtonSwipe ' + direction)
-
-    def get_pskey(self, key):
-        """Fetch value from persistent store."""
-        try:
-            cmd = 'GetPSHex ' + apollo_sink_events.PSKEY[key]
-        except KeyError:
-            raise DeviceError('PS Key: %s not found' % key)
-        pskey = ''
-        try:
-            ret = self.cmd(cmd)
-            for result in ret[0]:
-                if not re.search(r'pskey', result.lower()) and LOG_REGEX.match(
-                        result):
-                    # values are broken into words separated by spaces.
-                    pskey += LOG_REGEX.match(result).group('msg').replace(' ',
-                                                                          '')
-                else:
-                    continue
-        except DeviceError:
-            logging.exception('GetPSHex cmd failed')
-        return pskey
-
-    def get_version(self):
-        """Return a device version information.
-
-        Note: Version information is obtained from the firmware loader. Old
-        information is lost when firmware is updated.
-        Returns:
-            A dictionary of device version info. eg.
-            {
-                'Fw Build': '73',
-                'OTA Status': 'No OTA performed before this boot',
-            }
-
-        """
-        logging.debug('Inside get_version()...')
-        success, result = self._cmd('GetVer', throw_error=False)
-        status = {}
-        if result:
-            for line in result:
-                if isinstance(line, dict):
-                    status['build'] = line['vm_build_number']
-                    status['psoc_build'] = line['psoc_version']
-                    status['debug'] = line['csr_fw_debug_build']
-                    status['Fw Build Label'] = line['build_label']
-                    if 'last_ota_status' in line.keys():
-                        # Optional value in the proto response
-                        status['OTA Status'] = line['last_ota_status']
-                    else:
-                        status['OTA Status'] = 'No info'
-        return success, status
-
-    def get_earcon_version(self):
-        """Return a device Earson version information.
-
-        Returns:
-            Boolean:  True if success, False otherwise.
-            String: Earon Version e.g. 7001 0201 6100 0000
-
-        """
-        # TODO(nviboonchan): Earcon version format would be changed in the
-        # future.
-        logging.debug('Inside get_earcon_version()...')
-        result = self.get_pskey('PSKEY_EARCON_VERSION')
-        if result:
-            return True, result
-        else:
-            return False, None
-
-    def get_bt_status(self):
-        """Return a device bluetooth connection information.
-
-        Returns:
-            A dictionary of bluetooth status. eg.
-            {
-                'Comp. App': 'FALSE',
-               'HFP (pri.)', 'FALSE',
-               'HFP (sec.)': 'FALSE',
-               'A2DP (pri.)': 'FALSE',
-               'A2DP (sec.)': 'FALSE',
-               'A2DP disconnects': '3',
-               'A2DP Role (pri.)': 'slave',
-               'A2DP RSSI (pri.)': '-Touch'
-            }
-        """
-        logging.debug('Inside get_bt_status()...')
-        return self._get_status('GetBTStatus')
-
-    def get_conn_devices(self):
-        """Gets the BT connected devices.
-
-        Returns:
-            A dictionary of BT connected devices. eg.
-            {
-                'HFP Pri': 'xxxx',
-                'HFP Sec': 'xxxx',
-                'A2DP Pri': 'xxxx',
-                'A2DP Sec': 'xxxx',
-                'RFCOMM devices': 'xxxx',
-                'CTRL': 'xxxx',
-                'AUDIO': 'None',
-                'DEBUG': 'None',
-                'TRANS': 'None'
-             }
-
-        Raises:
-            ResponseError: If unexpected response occurs.
-        """
-        response_regex = re.compile('[0-9]+ .+: ')
-        connected_status = {}
-        response = self.cmd('GetConnDevices')
-        if not response:
-            raise ResponseError(
-                'No response returned by GetConnDevices command')
-        for line in response[0]:
-            if response_regex.search(line):
-                profile, value = line[line.find(' '):].split(':', 1)
-                connected_status[profile] = value
-        if not connected_status:
-            raise ResponseError('No BT Profile Status in response.')
-        return connected_status
-
-    def _get_status(self, cmd):
-        """Return a device status information."""
-        status = {}
-        try:
-            results = self.cmd(cmd)
-        except DeviceError as ex:
-            # logging.exception('{} cmd failed'.format(cmd))
-            logging.warning('Failed to get device status info.')
-            raise ex
-        results = results[0]
-        for result in results:
-            match = re.match(STATUS_REGEX, result)
-            if match:
-                key = match.group('key')
-                value = match.group('value')
-                status.update({key: value})
-        return status
-
-    def is_streaming(self):
-        """Returns the music streaming status on Apollo.
-
-        Returns:
-            Boolean: True if device is streaming music. False otherwise.
-        """
-
-        status = self.cmd('GetDSPStatus')
-        if any('active feature mask: 0' in log for log in
-               status[0]):
-            return False
-        elif any('active feature mask: 2' in log for log in
-                 status[0]):
-            return True
-        else:
-            return False
-
-    def is_in_call(self):
-        """Returns the phone call status on Apollo.
-
-        Returns:
-            Boolean: True if device has incoming call. False otherwise.
-        """
-
-        status = self.cmd('GetDSPStatus')
-        if not any('Inc' or 'out' in log for log in status[0]):
-            return False
-        return True
-
-    def is_device_limbo(self):
-        """Check if device is in Limbo state.
-
-        Returns:
-            Boolean: True if device is in limbo state, False otherwise.
-        """
-        device_state = self.get_device_state()
-        logging.info('BudsDevice "{}" state {}'.format(self.serial_number,
-                                                       device_state))
-        return device_state == 'limbo'
-
-    def get_device_state(self):
-        """Get state of the device.
-
-        Returns:
-            String representing the device state.
-
-        Raises:
-            DeviceError: If command fails.
-        """
-        _, status = self._cmd('GetDSPStatus')
-        for stat in status:
-            if isinstance(stat, dict):
-                logging.info(stat)
-                return stat['sink_state'].lower()
-        raise DeviceError('BudsDevice state not found in GetDSPStatus.')
-
-    def set_stay_connected(self, value):
-        """Run command to set the value for SetAlwaysConnected.
-
-        Args:
-            value: (int) 1 to keep connection engages at all time,
-                         0 for restoring
-        Returns:
-            the set state of type int (0 or 1) or None if not applicable
-        """
-
-        if int(self.version) >= 1663:
-            self._cmd('SetAlwaysConnected {}'.format(value))
-            logging.info('Setting sleep on idle to {}'.format(value))
-            return value
-
-    def get_codec(self):
-        """Get device's current audio codec.
-
-        Returns:
-            String representing the audio codec.
-
-        Raises:
-            DeviceError: If command fails.
-        """
-        success, status = self._cmd('get_codec')
-        logging.info('---------------------------------------')
-        logging.info(status)
-        logging.info('---------------------------------------')
-        if success:
-            for line in status:
-                if isinstance(line, dict):
-                    logging.info('Codec found: %s'.format(line['codec']))
-                    return line['codec']
-        raise DeviceError('BudsDevice state not found in get_codec.')
-
-    def crash_dump_detection(self):
-        """Reads crash dump determines if a crash is detected.
-
-        Returns:
-            True if crash detection is supported and if a new crash is found.
-            False otherwise.
-        """
-        # Detects if crashdump output is new
-        new_crash_regex = r'new crash = ([01]+)'
-        # filter crashdump for just the trace
-        crash_stack_regex = r'BASIC(.*)\n[\d]+ APP_STACK(.*)\n'
-        # remove time stamp commander output
-        timestamp_remover_regex = '\n[\\d]+ '
-
-        logging.debug('Inside IsCrashDumpDetection()...')
-        cmd_return = self.cmd('CrashDump', wait=1)
-        crash_dump_str = '\n'.join(cmd_return[0])
-        logging.info(crash_dump_str)
-        try:
-            # check for crash
-            match = re.search(new_crash_regex, crash_dump_str)
-            if match is not None:
-                if match.groups()[0] == '1':  # new crash found
-                    logging.error('Crash detected!!')
-                    basic, app_stack = re.search(crash_stack_regex,
-                                                 crash_dump_str,
-                                                 re.DOTALL).groups()
-                    # remove time stamps from capture
-                    basic = re.sub(timestamp_remover_regex, '', basic)
-                    app_stack = re.sub(timestamp_remover_regex, '', app_stack)
-                    # write to log
-                    # pylint: disable=bad-whitespace
-                    logging.info(
-                        '\n&270d = %s\n&270e = %s\n' % (basic, app_stack))
-                    # pylint: enable=bad-whitespace
-                    return True
-                else:  # no new crash
-                    logging.info('No crash detected')
-                    return False
-        except AttributeError:
-            logging.exception(
-                'Apollo crash dump output is not in expected format')
-            raise DeviceError('Apollo crash dump not in expected format')
-
-    @property
-    def version(self):
-        """Application version.
-
-        Returns:
-            (String) Firmware version.
-        """
-        _, result = self.get_version()
-        return result['build']
-
-    @property
-    def bluetooth_address(self):
-        """Bluetooth MAC address.
-
-        Returns:
-            a string representing 48bit BT MAC address in Hex.
-
-        Raises:
-            DeviceError: Unable to find BT Address
-        """
-        results = self.get_pskey('PSKEY_BDADDR')
-        if not results:
-            raise DeviceError('Unable to find BT Address')
-        logging.info(results)
-        # Bluetooth lower address part, upper address part and non-significant
-        # address part.
-        bt_lap = results[2:8]
-        bt_uap = results[10:12]
-        bt_nap = results[12:16]
-        results = bt_nap + bt_uap + bt_lap
-
-        return ':'.join(map(''.join, zip(*[iter(results)] * 2))).upper()
-
-    @property
-    def device_name(self):
-        """Device Friendly Name.
-
-        Returns:
-            a string representing device friendly name.
-
-        Raises:
-            DeviceError: Unable to find a wearable device name.
-        """
-        result = self.get_pskey('PSKEY_DEVICE_NAME')
-        if not result:
-            raise DeviceError('Unable to find BudsDevice Name')
-        logging.info(_to_ascii(result))
-        return _to_ascii(result)
-
-    @property
-    def stay_connected(self):
-        return self.stay_connected_state
-
-    @stay_connected.setter
-    def stay_connected(self, value):
-        self.stay_connected_state = self.set_stay_connected(value)
-
-    def read_serial_port(self, read_until=None):
-        """Read serial port until specified read_until value in seconds."""
-        # use default read_until value if not specified
-        if read_until:
-            time.sleep(read_until)
-        res = self.connection_handle.read()
-        buf_read = []
-        for line in res:
-            if apollo_log_decoder.is_automation_protobuf(line):
-                decoded = apollo_log_decoder.decode(line)
-                buf_read.append(decoded)
-            else:
-                buf_read.append(line)
-        return buf_read
-
-    def wait_for_reset(self, timeout=30):
-        """waits for the device to reset by check serial enumeration.
-
-        Checks every .5 seconds for the port.
-
-        Args:
-            timeout: The max time to wait for the device to disappear.
-
-        Returns:
-            Bool: True if the device reset was detected. False if not.
-        """
-        start_time = time.time()
-        while True:
-            res = subprocess.Popen(['ls', self.commander_port],
-                                   stdout=subprocess.PIPE,
-                                   stderr=subprocess.PIPE)
-            res.communicate()
-            if res.returncode != 0:
-                logging.info('BudsDevice reset detected')
-                return True
-            elif (time.time() - start_time) > timeout:
-                logging.info('Timeout waiting for device to reset.....')
-                return False
-            else:
-                time.sleep(.5)
-
-    def set_in_case(self, reconnect=True):
-        """Simulates setting apollo in case and wait for device to come up.
-
-        Args:
-            reconnect: bool - if method should block until reconnect
-        """
-        logging.info('Setting device in case')
-        out = self.send('Pow 2')
-        for i in out:
-            if 'No OTA wakeup condition' in i:
-                logging.info('No wake up condition.')
-            elif 'STM Wakeup 10s' in i:
-                logging.info('Wake up condition detected.')
-        if reconnect:
-            self.wait_for_reset()
-            self.reconnect()
-
-
-class ParentDevice(BudsDevice):
-    """Wrapper object for Device that addresses b10 recovery and build flashing.
-
-    Recovery mechanism:
-    In case a serial connection could not be established to b10, the recovery
-    mechanism is activated  ONLY if'recover_device' is set to 'true' and
-    b29_serial is defined in config file. This helps recover a device that has a
-    bad build installed.
-    """
-
-    def __init__(self, serial_number, recover_device=False, b29_serial=None):
-        # if recover device parameter is supplied and there is an error in
-        # instantiating B10 try to recover device instantiating b10 has to fail
-        # at most $tries_before_recovery time before initiating a recovery
-        # try to run the recovery at most $recovery_times before raising Error
-        # after the first recovery attempt failure try to reset b29 each
-        # iteration
-        self.b29_device = None
-        if recover_device:
-            if b29_serial is None:
-                logging.error('B29 serial not defined')
-                raise Error(
-                    'Recovery failed because "b29_serial" definition not '
-                    'present in device manifest file')
-            else:
-                self.b29_device = B29Device(b29_serial)
-            tries_before_recovery = 5
-            recovery_tries = 5
-            for attempt in range(tries_before_recovery):
-                try:
-                    # build crash symptoms varies based on the nature of the
-                    # crash connectError is thrown if the device never shows up
-                    # in /dev/ sometimes device shows and can connect but
-                    # sending commands fails or crashes apollo in that case,
-                    # DeviceError is thrown
-                    super().__init__(serial_number, commander_port=None,
-                                     log_port=None, serial_logger=None)
-                    break
-                except (ConnectError, DeviceError) as ex:
-                    logging.warning(
-                        'Error initializing apollo object - # of attempt '
-                        'left : %d' % (tries_before_recovery - attempt - 1))
-                    if attempt + 1 >= tries_before_recovery:
-                        logging.error(
-                            'Retries exhausted - now attempting to restore '
-                            'golden image')
-                        for recovery_attempt in range(recovery_tries):
-                            if not self.b29_device.restore_golden_image():
-                                logging.error('Recovery failed - retrying...')
-                                self.b29_device.reset_charger()
-                                continue
-                            # try to instantiate now
-                            try:
-                                super().__init__(serial_number,
-                                                 commander_port=None,
-                                                 log_port=None,
-                                                 serial_logger=None)
-                                break
-                            except (ConnectError, DeviceError):
-                                if recovery_attempt == recovery_tries - 1:
-                                    raise Error(
-                                        'Recovery failed - ensure that there '
-                                        'is no mismatching serial numbers of '
-                                        'b29 and b10 is specified in config')
-                                else:
-                                    logging.warning(
-                                        'Recovery attempt failed - retrying...')
-                    time.sleep(2)
-        else:
-            super().__init__(serial_number, commander_port=None, log_port=None,
-                             serial_logger=None)
-        # set this to prevent sleep
-        self.set_stay_connected(1)
-
-    def get_info(self):
-        information_dictionary = {}
-        information_dictionary['type'] = self.dut_type
-        information_dictionary['serial'] = self.serial_number
-        information_dictionary['log port'] = self.log_port
-        information_dictionary['command port'] = self.commander_port
-        information_dictionary['bluetooth address'] = self.bluetooth_address
-        success, build_dict = self.get_version()
-        information_dictionary['build'] = build_dict
-        # Extract the build number as a separate key. Useful for BigQuery.
-        information_dictionary['firmware build number'] = build_dict.get(
-            'build', '9999')
-        information_dictionary['name'] = self.device_name
-        if self.b29_device:
-            information_dictionary['b29 serial'] = self.b29_device.serial
-            information_dictionary['b29 firmware'] = self.b29_device.fw_version
-            information_dictionary['b29 commander port'] = self.b29_device.port
-            information_dictionary[
-                'b29 app version'] = self.b29_device.app_version
-        return information_dictionary
-
-    def setup(self, **kwargs):
-        """
-
-        Args:
-            apollo_build: if specified, will be used in flashing the device to
-                          that build prior to running any of the tests. If not
-                          specified flashing is skipped.
-        """
-        if 'apollo_build' in kwargs and kwargs['apollo_build'] is not None:
-            build = kwargs['apollo_build']
-            X20_REGEX = re.compile(r'/google/data/')
-            if not os.path.exists(build) or os.stat(build).st_size == 0:
-                # if x20 path, retry on file-not-found error or if file size is
-                # zero b/c X20 path does not update immediately
-                if X20_REGEX.match(build):
-                    for i in range(20):
-                        # wait until file exists and size is > 0 w/ 6 second
-                        # interval on retry
-                        if os.path.exists(build) and os.stat(build).st_size > 0:
-                            break
-
-                        if i == 19:
-                            logging.error('Build path (%s) does not exist or '
-                                          'file size is 0 - aborted' % build)
-
-                            raise Error('Specified build path (%s) does not '
-                                        'exist or file size is 0' % build)
-                        else:
-                            logging.warning('Build path (%s) does not exist or '
-                                            'file size is 0 - retrying...' %
-                                            build)
-                            time.sleep(6)
-                else:
-                    raise Error('Specified build path (%s) does not exist or '
-                                'file size is 0' % build)
-                self.flash_from_file(file_name=build, reconnect=True)
-        else:
-            logging.info('Not flashing apollo.')
-
-    def teardown(self, **kwargs):
-        self.close()
-
-
-def _evt_hex(cmd):
-    return 'EvtHex ' + apollo_sink_events.SINK_EVENTS[cmd]
-
-
-def _to_ascii(orig):
-    # Returned value need to be byte swapped. Remove last octet if it is 0.
-    result = _byte_swap(orig)
-    result = result[:-2] if result[-2:] == '00' else result
-    return bytearray.fromhex(result).decode()
-
-
-def _byte_swap(orig):
-    """Simple function to swap bytes order.
-
-    Args:
-        orig: original string
-
-    Returns:
-        a string with bytes swapped.
-        eg. orig = '6557276920736952006f'.
-        After swap, return '57656927732052696f00'
-    """
-    return ''.join(
-        sum([(c, d, a, b) for a, b, c, d in zip(*[iter(orig)] * 4)], ()))
diff --git a/src/antlion/controllers/buds_lib/apollo_utils.py b/src/antlion/controllers/buds_lib/apollo_utils.py
deleted file mode 100644
index 98c9be8..0000000
--- a/src/antlion/controllers/buds_lib/apollo_utils.py
+++ /dev/null
@@ -1,73 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import time
-from antlion import signals
-from antlion.controllers.buds_lib import apollo_lib
-
-AVRCP_WAIT_TIME = 3
-
-
-def get_serial_object(pri_ad, serial_device):
-    """This function will creates object for serial device connected.
-
-    Args:
-        pri_ad: Android device.
-        serial_device: serial device connected.
-
-    Returns:
-        object of serial device, otherwise Abort the class.
-    """
-    devices = apollo_lib.get_devices()
-    for device in devices:
-        if device['serial_number'] in serial_device:
-            return apollo_lib.BudsDevice(device['serial_number'])
-    pri_ad.log.error('Apollo device not found')
-    raise signals.TestAbortAll('Apollo device not found')
-
-
-def avrcp_actions(pri_ad, buds_device):
-    """Performs avrcp controls like volume up, volume down
-
-    Args:
-        pri_ad: Android device.
-        buds_device: serial device object to perform avrcp actions.
-
-    Returns:
-        True if successful, otherwise otherwise raises Exception.
-    """
-    pri_ad.log.debug("Setting voume to 0")
-    pri_ad.droid.setMediaVolume(0)
-    current_volume = pri_ad.droid.getMediaVolume()
-    pri_ad.log.info('Current volume to {}'.format(current_volume))
-    for _ in range(5):
-        buds_device.volume('Up')
-        time.sleep(AVRCP_WAIT_TIME)
-    pri_ad.log.info('Volume increased to {}'.format(
-        pri_ad.droid.getMediaVolume()))
-    if current_volume == pri_ad.droid.getMediaVolume():
-        pri_ad.log.error('Increase volume failed')
-        raise signals.TestFailure("Increase volume failed")
-    current_volume = pri_ad.droid.getMediaVolume()
-    for _ in range(5):
-        buds_device.volume('Down')
-        time.sleep(AVRCP_WAIT_TIME)
-    pri_ad.log.info('Volume decreased to {}'.format(
-        pri_ad.droid.getMediaVolume()))
-    if current_volume == pri_ad.droid.getMediaVolume():
-        pri_ad.log.error('Decrease volume failed')
-        raise signals.TestFailure("Decrease volume failed")
-    return True
diff --git a/src/antlion/controllers/buds_lib/b29_lib.py b/src/antlion/controllers/buds_lib/b29_lib.py
deleted file mode 100644
index 32839c1..0000000
--- a/src/antlion/controllers/buds_lib/b29_lib.py
+++ /dev/null
@@ -1,221 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Class definition of B29 device for controlling the device.
-
-B29 is an engineering device with serial capabilities. It is almost like
-b20 except it has additional features that allow sending commands
-to b10 via one-wire and to pull logs from b10 via one-wire.
-
-Please see https://docs.google.com/document/d/17yJeJRNWxv5E9
-fBvw0sXkgwCBkshU_l4SxWkKgAxVmk/edit for details about available operations.
-"""
-
-import os
-import re
-import time
-from logging import Logger
-
-from antlion import utils
-from antlion.controllers.buds_lib import tako_trace_logger
-
-logging = tako_trace_logger.TakoTraceLogger(Logger(__file__))
-DEVICE_REGEX = (
-    r'_(?P<device_serial>[A-Z0-9]+)-(?P<interface>\w+)\s->\s'
-    r'(\.\./){2}(?P<port>\w+)'
-)
-# TODO: automate getting the latest version from x20
-DEBUG_BRIDGE = ('/google/data/ro/teams/wearables/apollo/ota/jenkins-presubmit/'
-                'ovyalov/master/apollo-sw/CL14060_v2-build13686/v13686/'
-                'automation/apollo_debug_bridge/linux2/apollo_debug_bridge')
-B29_CHIP = 'Cypress_Semiconductor_USBUART'
-
-
-# TODO:
-# as the need arises, additional functionalities of debug_bridge should be
-# integrated
-# TODO:
-# https://docs.google.com/document/d/17yJeJRNWxv5E9fBvw0sXkgwCBkshU_
-# l4SxWkKgAxVmk/edit
-
-class B29Error(Exception):
-    """Module Level Error."""
-
-
-def get_b29_devices():
-    """ Get all available B29 devices.
-
-    Returns:
-      (list) A list of available devices (ex: ['/dev/ttyACM4',...]) or empty
-      list if none found
-    """
-    devices = []
-    result = os.popen('ls -l /dev/serial/by-id/*%s*' % B29_CHIP).read()
-    for line in result.splitlines():
-        match = re.search(DEVICE_REGEX, line)
-        device_serial = match.group('device_serial')
-        log_port = None
-        commander_port = '/dev/' + match.group('port')
-        device = {
-            'commander_port': commander_port,
-            'log_port': log_port,
-            'serial_number': device_serial
-        }
-        devices.append(device)
-    return devices
-
-
-class B29Device(object):
-    """Class to control B29 device."""
-
-    def __init__(self, b29_serial):
-        """ Class to control B29 device
-        Args: String type of serial number (ex: 'D96045152F121B00'
-        """
-        self.serial = b29_serial
-        b29_port = [d['commander_port'] for d in get_b29_devices() if
-                    d['serial_number'] == b29_serial]
-        if not b29_port:
-            logging.error("unable to find b29 with serial number %s" %
-                          b29_serial)
-            raise B29Error(
-                "Recovery failed because b29_serial specified in device "
-                "manifest file is not found or invalid")
-        self.port = b29_port[0]
-        self.ping_match = {'psoc': r'Pings: tx=[\d]* rx=[1-9][0-9]',
-                           'csr': r'count=100, sent=[\d]*, received=[1-9][0-9]',
-                           'charger': r'Pings: tx=[\d]* rx=[1-9][0-9]'}
-        self.fw_version = self._get_version('fw')
-        self.app_version = self._get_version('app')
-
-    def _get_version(self, type='fw'):
-        """ Method to get version of B29
-        Returns:
-            String version if found (ex: '0006'), None otherwise
-        """
-        command = '--serial={}'.format(self.port)
-        debug_bridge_process = self._send_command(command=command)
-        if type == 'fw':
-            version_match = re.compile(r'CHARGER app version: version=([\d]*)')
-        elif type == 'app':
-            version_match = re.compile(r'APP VERSION: ([\d]*)')
-        version_str = self._parse_output_of_running_process(
-            debug_bridge_process, version_match)
-        debug_bridge_process.kill()
-        if version_str:
-            match = version_match.search(version_str)
-            version = match.groups()[0]
-            return version
-        return None
-
-    def _parse_output_of_running_process(self, subprocess, match, timeout=30):
-        """ Parses the logs from subprocess objects and checks to see if a
-        match is found within the allotted time
-        Args:
-            subprocess: object returned by _send_command (which is the same as
-            bject returned by subprocess.Popen()) match: regex match object
-            (what is returned by re.compile(r'<regex>') timeout: int - time to
-            keep retrying before bailing
-
-        """
-        start_time = time.time()
-        success_match = re.compile(match)
-        while start_time + timeout > time.time():
-            out = subprocess.stderr.readline()
-            if success_match.search(out):
-                return out
-            time.sleep(.5)
-        return False
-
-    def _send_command(self, command):
-        """ Send command to b29 using apollo debug bridge
-        Args:
-          command: The command for apollo debug to execute
-        Returns:
-          subprocess object
-        """
-        return utils.start_standing_subprocess(
-            '{} {} {}'.format(DEBUG_BRIDGE, '--rpc_port=-1', command),
-            shell=True)
-
-    def restore_golden_image(self):
-        """ Start a subprocess that calls the debug-bridge executable with
-        options that restores golden image of b10 attached to the b29. The
-        recovery restores the 'golden image' which is available in b10 partition
-         8. The process runs for 120 seconds which is adequate time for the
-         recovery to have completed.
-        """
-        # TODO:
-        # because we are accessing x20, we need to capture error resulting from
-        #  expired prodaccess and report it explicitly
-        # TODO:
-        # possibly file not found error?
-
-        # start the process, wait for two minutes and kill it
-        logging.info('Restoring golden image...')
-        command = '--serial=%s --debug_spi=dfu --sqif_partition=8' % self.port
-        debug_bridge_process = self._send_command(command=command)
-        success_match = re.compile('DFU on partition #8 successfully initiated')
-        if self._parse_output_of_running_process(debug_bridge_process,
-                                                 success_match):
-            logging.info('Golden image restored successfully')
-            debug_bridge_process.kill()
-            return True
-        logging.warning('Failed to restore golden image')
-        debug_bridge_process.kill()
-        return False
-
-    def ping_component(self, component, timeout=30):
-        """ Send ping to the specified component via B290
-        Args:
-            component = 'csr' or 'psoc' or 'charger'
-        Returns:
-            True if successful and False otherwise
-        """
-        if component not in ('csr', 'psoc', 'charger'):
-            raise B29Error('specified parameter for component is not valid')
-        logging.info('Pinging %s via B29...' % component)
-        command = '--serial={} --ping={}'.format(self.port, component)
-        debug_bridge_process = self._send_command(command=command)
-        if self._parse_output_of_running_process(debug_bridge_process,
-                                                 self.ping_match[component],
-                                                 timeout):
-            logging.info('Ping passes')
-            debug_bridge_process.kill()
-            return True
-        else:
-            logging.warning('Ping failed')
-            debug_bridge_process.kill()
-            return False
-
-    def reset_charger(self):
-        """ Send reset command to B29
-        Raises: TimeoutError (lib.utils.TimeoutError) if the device does not
-        come back within 120 seconds
-        """
-        # --charger_reset
-        if int(self.fw_version) >= 6:
-            logging.info('Resetting B29')
-            command = '--serial={} --charger_reset'.format(self.port)
-            reset_charger_process = self._send_command(command=command)
-            time.sleep(2)
-            reset_charger_process.kill()
-            logging.info('Waiting for B29 to become available..')
-            utils.wait_until(lambda: self.ping_component('charger'), 120)
-        else:
-            logging.warning('B20 firmware version %s does not support '
-                            'charger_reset argument' % self.fw_version)
diff --git a/src/antlion/controllers/buds_lib/dev_utils/__init__.py b/src/antlion/controllers/buds_lib/dev_utils/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/controllers/buds_lib/dev_utils/__init__.py
+++ /dev/null
diff --git a/src/antlion/controllers/buds_lib/dev_utils/apollo_log_decoder.py b/src/antlion/controllers/buds_lib/dev_utils/apollo_log_decoder.py
deleted file mode 100644
index fafb05a..0000000
--- a/src/antlion/controllers/buds_lib/dev_utils/apollo_log_decoder.py
+++ /dev/null
@@ -1,77 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Decodes the protobufs described in go/apollo-qa-tracing-design."""
-
-import base64
-import binascii
-import struct
-
-from antlion.controllers.buds_lib.dev_utils.proto.gen import apollo_qa_pb2
-from antlion.controllers.buds_lib.dev_utils.proto.gen import audiowear_pb2
-
-
-def to_dictionary(proto):
-    proto_dic = {}
-    msg = [element.split(':') for element in str(proto).split('\n') if element]
-    for element in msg:
-        key = element[0].strip()
-        value = element[1].strip()
-        proto_dic[key] = value
-    return proto_dic
-
-
-def is_automation_protobuf(logline):
-    return logline.startswith('QA_MSG|')
-
-
-def decode(logline):
-    """Decode the logline.
-
-    Args:
-      logline: String line with the encoded message.
-
-    Returns:
-      String value with the decoded message.
-    """
-    decoded = None
-    decoders = {'HEX': binascii.unhexlify, 'B64': base64.decodebytes}
-    msgs = {
-        apollo_qa_pb2.TRACE:
-            apollo_qa_pb2.ApolloQATrace,
-        apollo_qa_pb2.GET_VER_RESPONSE:
-            apollo_qa_pb2.ApolloQAGetVerResponse,
-        apollo_qa_pb2.GET_CODEC_RESPONSE:
-            apollo_qa_pb2.ApolloQAGetCodecResponse,
-        apollo_qa_pb2.GET_DSP_STATUS_RESPONSE:
-            apollo_qa_pb2.ApolloQAGetDspStatusResponse,
-    }
-
-    if is_automation_protobuf(logline):
-        _, encoding, message = logline.split("|", 2)
-        message = message.rstrip()
-        if encoding in decoders.keys():
-            message = decoders[encoding](message)
-            header = message[0:4]
-            serialized = message[4:]
-            if len(header) == 4 and len(serialized) == len(message) - 4:
-                msg_group, msg_type, msg_len = struct.unpack('>BBH', header)
-                if (len(serialized) == msg_len and
-                        msg_group == audiowear_pb2.APOLLO_QA):
-                    proto = msgs[msg_type]()
-                    proto.ParseFromString(serialized)
-                    decoded = to_dictionary(proto)
-    return decoded
diff --git a/src/antlion/controllers/buds_lib/dev_utils/apollo_log_regex.py b/src/antlion/controllers/buds_lib/dev_utils/apollo_log_regex.py
deleted file mode 100644
index b4dd58a..0000000
--- a/src/antlion/controllers/buds_lib/dev_utils/apollo_log_regex.py
+++ /dev/null
@@ -1,63 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Apollo's event logs regexp for each button action."""
-
-EVENT_REGEX = (
-  r'(?P<time_stamp>\d+)\s(?P<log_level>[A-Z])(.*)State Match(.*)\r\n')
-VOL_CHANGE_REGEX = (
-  r'(?P<time_stamp>\d+)\sVolume = (?P<vol_level>\d+)(.*)\r\n')
-VOLUP_REGEX = (
-  r'(?P<time_stamp>\d+)\s(?P<log_level>[A-Z])(.*)State Match(.*)3202(.*)\r\n')
-VOLDOWN_REGEX = (
-  r'(?P<time_stamp>\d+)\s(?P<log_level>[A-Z])(.*)State Match(.*)320a(.*)\r\n')
-AVRCP_PLAY_REGEX = (r'(?P<time_stamp>\d+)\sAVRCP '
-                    r'play\r\n')
-AVRCP_PAUSE_REGEX = (r'(?P<time_stamp>\d+)\sAVRCP '
-                     r'paused\r\n')
-MIC_OPEN_EVENT_REGEX = (
-  r'(?P<time_stamp>\d+)\s(?P<log_level>[A-Z])(.*)State Match(.*)'
-  r'\[3206\](.*)\r\n')
-MIC_CLOSE_EVENT_REGEX = (
-  r'(?P<time_stamp>\d+)\s(?P<log_level>[A-Z])(.*)State Match(.*)'
-  r'\[3207\](.*)\r\n')
-PREV_TRACK_EVENT_REGEX = (
-  r'(?P<time_stamp>\d+)\s(?P<log_level>[A-Z])(.*)State Match(.*)'
-  r'\[3208\](.*)\r\n')
-PREV_CHANNEL_EVENT_REGEX = (
-  r'(?P<time_stamp>\d+)\s(?P<log_level>[A-Z])(.*)State Match(.*)'
-  r'\[3209\](.*)\r\n')
-NEXT_TRACK_EVENT_REGEX = (
-  r'(?P<time_stamp>\d+)\s(?P<log_level>[A-Z])(.*)State Match(.*)'
-  r'\[3200\](.*)\r\n')
-NEXT_CHANNEL_EVENT_REGEX = (
-  r'(?P<time_stamp>\d+)\s(?P<log_level>[A-Z])(.*)State Match(.*)'
-  r'\[3201\](.*)\r\n')
-FETCH_NOTIFICATION_EVENT_REGEX = (
-  r'(?P<time_stamp>\d+)\s(?P<log_level>[A-Z])(.*)State Match(.*)'
-  r'\[3205\](.*)\r\n')
-VOICE_CMD_COMPLETE_REGEX = (
-  r'(?P<time_stamp>\d+)\s(?P<log_level>[A-Z])\sDspOnVoiceCommandComplete\r\n')
-VOICE_CMD_START_REGEX = (
-  r'(?P<time_stamp>\d+)\s(?P<log_level>[A-Z])\sDspStartVoiceCommand(.*)\r\n')
-MIC_OPEN_PROMT_REGEX = (
-  r'(?P<time_stamp>\d+)\s(?P<log_level>[A-Z])(.*)AudioPromptPlay 33(.*)\r\n')
-MIC_CLOSE_PROMT_REGEX = (
-  r'(?P<time_stamp>\d+)\s(?P<log_level>[A-Z])(.*)AudioPromptPlay 34(.*)\r\n')
-POWER_ON_EVENT_REGEX = (
-  r'(?P<time_stamp>\d+)\s(?P<log_level>[A-Z]) --hello--(.*)PowerOn(.*)\r\n')
-POWER_OFF_EVENT_REGEX = (
-  r'(?P<time_stamp>\d+)\s(?P<log_level>[A-Z]) EvtAW:320d(.*)\r\n')
diff --git a/src/antlion/controllers/buds_lib/dev_utils/apollo_sink_events.py b/src/antlion/controllers/buds_lib/dev_utils/apollo_sink_events.py
deleted file mode 100644
index fcba1fd..0000000
--- a/src/antlion/controllers/buds_lib/dev_utils/apollo_sink_events.py
+++ /dev/null
@@ -1,221 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Apollo PS Keys and User Sink Events."""
-
-# Persistent Store (PS) Keys from rio_all_merged.psr.
-
-PSKEY = {
-    'PSKEY_BDADDR': '0x0001',
-    'PSKEY_DEVICE_NAME': '0x0108',
-    'PSKEY_DEEP_SLEEP_STATE': '0x0229',
-    'PSKEY_USB_VERSION': '0x02bc',
-    'PSKEY_USB_DEVICE_CLASS_CODES': '0x02bd',
-    'PSKEY_USB_VENDOR_ID': '0x02be',
-    'PSKEY_USB_PRODUCT_ID': '0x02bf',
-    'PSKEY_USB_PRODUCT_STRING': '0x02c2',
-    'PSKEY_USB_SERIAL_NUMBER_STRING': '0x02c3',
-    'PSKEY_EARCON_VERSION': '0x28b'
-}
-
-# Rio logging features from rio_log.h.
-
-LOG_FEATURES = {
-    'ALL': '0xffff',
-    'VERBOSE': '0x0001',
-    'TEST': '0x0002',
-    'CSR': '0x0004',
-    'DEBUG': '0x0008',
-    'INFO': '0x0010',
-    'ERROR': '0x0020',
-    'TIME_STAMP': '0x0040',
-}
-
-# Supported events from sink_events.h."""
-
-SINK_EVENTS = {
-    'EventUsrMicOpen': '0x3206',
-    'EventUsrMicClose': '0x3207',
-    'EventUsrPowerOn': '0x4001',
-    'EventUsrPowerOff': '0x4002',
-    'EventUsrEnterPairing': '0x4003',
-    'EventUsrInitateVoiceDial': '0x4004',
-    'EventUsrInitateVoiceDial_AG2': '0x4005',
-    'EventUsrLastNumberRedial': '0x4006',
-    'EventUsrLastNumberRedial_AG2': '0x4007',
-    'EventUsrAnswer': '0x4008',
-    'EventUsrReject': '0x4009',
-    'EventUsrCancelEnd': '0x400A',
-    'EventUsrTransferToggle': '0x400B',
-    'EventUsrMuteToggle': '0x400C',
-    'EventUsrVolumeUp': '0x400D',
-    'EventUsrVolumeDown': '0x400E',
-    'EventUsrVolumeToggle': '0x400F',
-    'EventUsrThreeWayReleaseAllHeld': '0x4010',
-    'EventUsrThreeWayAcceptWaitingReleaseActive': '0x4011',
-    'EventUsrThreeWayAcceptWaitingHoldActive': '0x4012',
-    'EventUsrThreeWayAddHeldTo3Way': '0x4013',
-    'EventUsrThreeWayConnect2Disconnect': '0x4014',
-    'EventUsrLedsOnOffToggle': '0x4015',
-    'EventUsrLedsOn': '0x4016',
-    'EventUsrLedsOff': '0x4017',
-    'EventUsrEstablishSLC': '0x4018',
-    'EventUsrMuteOn': '0x4019',
-    'EventUsrMuteOff': '0x401A',
-    'EventUsrEnterTXContTestMode': '0x401B',
-    'EventUsrEnterDUTState': '0x401C',
-    'EventUsrBassBoostEnableDisableToggle': '0x401D',
-    'EventUsrPlaceIncomingCallOnHold': '0x401E',
-    'EventUsrAcceptHeldIncomingCall': '0x401F',
-    'EventUsrRejectHeldIncomingCall': '0x4020',
-    'EventUsrEnterDFUMode': '0x4021',
-    'EventUsrEnterDriverlessDFUMode': '0x4022',
-    'EventUsrEnterServiceMode': '0x4023',
-    'EventUsrAudioPromptsOn': '0x4024',
-    'EventUsrAudioPromptsOff': '0x4025',
-    'EventUsrDialStoredNumber': '0x4026',
-    'EventUsrUpdateStoredNumber': '0x4027',
-    'EventUsrRestoreDefaults': '0x4028',
-    'EventUsrConfirmationAccept': '0x4029',
-    'EventUsrConfirmationReject': '0x402A',
-    'EventUsrSelectAudioPromptLanguageMode': '0x402B',
-    'EventUsrSwitchAudioMode': '0x402F',
-    'EventUsrButtonLockingOn': '0x4030',
-    'EventUsrButtonLockingOff': '0x4031',
-    'EventUsrButtonLockingToggle': '0x4032',
-    'EventUsrRssiPair': '0x4034',
-    'EventUsrBassBoostOn': '0x4035',
-    'EventUsrBassBoostOff': '0x4036',
-    'EventUsr3DEnhancementOn': '0x4037',
-    'EventUsr3DEnhancementOff': '0x4038',
-    'EventUsrSelectAudioSourceNext': '0x4039',
-    'EventUsrSelectAudioSourceAnalog': '0x403A',
-    'EventUsrSelectAudioSourceUSB': '0x403B',
-    'EventUsrSelectAudioSourceAG1': '0x403C',
-    'EventUsrSelectAudioSourceAG2': '0x403D',
-    'EventUsrSelectFMAudioSource': '0x403E',
-    'EventUsrSelectAudioSourceNone': '0x403F',
-    'EventUsrPbapDialIch': '0x4040',
-    'EventUsrPbapDialMch': '0x4041',
-    'EventUsrIntelligentPowerManagementOn': '0x4042',
-    'EventUsrIntelligentPowerManagementOff': '0x4043',
-    'EventUsrIntelligentPowerManagementToggle': '0x4044',
-    'EventUsrAvrcpPlayPause': '0x4045',
-    'EventUsrAvrcpStop': '0x4046',
-    'EventUsrAvrcpSkipForward': '0x4047',
-    'EventUsrAvrcpSkipBackward': '0x4048',
-    'EventUsrAvrcpFastForwardPress': '0x4049',
-    'EventUsrAvrcpFastForwardRelease': '0x404A',
-    'EventUsrAvrcpRewindPress': '0x404B',
-    'EventUsrAvrcpRewindRelease': '0x404C',
-    'EventUsrAvrcpShuffleOff': '0x404D',
-    'EventUsrAvrcpShuffleAllTrack': '0x404E',
-    'EventUsrAvrcpShuffleGroup': '0x404F',
-    'EventUsrAvrcpRepeatOff': '0x4050',
-    'EventUsrAvrcpRepeatSingleTrack': '0x4051',
-    'EventUsrAvrcpRepeatAllTrack': '0x4052',
-    'EventUsrAvrcpRepeatGroup': '0x4053',
-    'EventUsrAvrcpPlay': '0x4054',
-    'EventUsrAvrcpPause': '0x4055',
-    'EventUsrAvrcpToggleActive': '0x4056',
-    'EventUsrAvrcpNextGroupPress': '0x4057',
-    'EventUsrAvrcpPreviousGroupPress': '0x4058',
-    'EventUsrPbapSetPhonebook': '0x4059',
-    'EventUsrPbapBrowseEntry': '0x405A',
-    'EventUsrPbapBrowseList': '0x405B',
-    'EventUsrPbapDownloadPhonebook': '0x405C',
-    'EventUsrPbapSelectPhonebookObject': '0x405D',
-    'EventUsrPbapBrowseComplete': '0x405E',
-    'EventUsrPbapGetPhonebookSize': '0x405F',
-    'EventUsrUsbPlayPause': '0x4060',
-    'EventUsrUsbStop': '0x4061',
-    'EventUsrUsbFwd': '0x4062',
-    'EventUsrUsbBack': '0x4063',
-    'EventUsrUsbMute': '0x4064',
-    'EventUsrUsbLowPowerMode': '0x4065',
-    'EventUsrTestModeAudio': '0x4066',
-    'EventUsrTestModeTone': '0x4067',
-    'EventUsrTestModeKey': '0x4068',
-    'EventUsrTestDefrag': '0x4069',
-    'EventUsrDebugKeysToggle': '0x406A',
-    'EventUsrSpeechRecognitionTuningStart': '0x406B',
-    'EventUsrWbsTestSetCodecs': '0x406C',
-    'EventUsrWbsTestOverrideResponse': '0x406D',
-    'EventUsrWbsTestSetCodecsSendBAC': '0x406E',
-    'EventUsrCreateAudioConnection': '0x406F',
-    'EventUsrSwapA2dpMediaChannel': '0x407F',
-    'EventUsrExternalMicConnected': '0x4080',
-    'EventUsrExternalMicDisconnected': '0x4081',
-    'EventUsrSSROn': '0x4082',
-    'EventUsrSSROff': '0x4083',
-    'EventUsrPeerSessionInquire': '0x4089',
-    'EventUsrPeerSessionConnDisc': '0x408A',
-    'EventUsrPeerSessionEnd': '0x408B',
-    'EventUsrBatteryLevelRequest': '0x408C',
-    'EventUsrVolumeOrientationNormal': '0x408D',
-    'EventUsrVolumeOrientationInvert': '0x408E',
-    'EventUsrResetPairedDeviceList': '0x408F',
-    'EventUsrEnterDutMode': '0x4090',
-    'EventUsr3DEnhancementEnableDisableToggle': '0x4091',
-    'EventUsrRCVolumeUp': '0x4092',
-    'EventUsrRCVolumeDown': '0x4093',
-    'EventUsrEnterBootMode2': '0x4094',
-    'EventUsrChargerConnected': '0x4095',
-    'EventUsrChargerDisconnected': '0x4096',
-    'EventUsrSubwooferDisconnect': '0x4097',
-    'EventUsrAnalogAudioConnected': '0x4098',
-    'EventUsrAnalogAudioDisconnected': '0x4099',
-    'EventUsrGaiaDFURequest': '0x409A',
-    'EventUsrStartIRLearningMode': '0x409B',
-    'EventUsrStopIRLearningMode': '0x409C',
-    'EventUsrClearIRCodes': '0x409D',
-    'EventUsrUserEqOn': '0x409E',
-    'EventUsrUserEqOff': '0x409F',
-    'EventUsrUserEqOnOffToggle': '0x40A0',
-    'EventUsrSpdifAudioConnected': '0x40AD',
-    'EventUsrSpdifAudioDisconnected': '0x40AE',
-    'EventUsrSelectAudioSourceSpdif': '0x40AF',
-    'EventUsrChangeAudioRouting': '0x40B0',
-    'EventUsrMasterDeviceTrimVolumeUp': '0x40B1',
-    'EventUsrMasterDeviceTrimVolumeDown': '0x40B2',
-    'EventUsrSlaveDeviceTrimVolumeUp': '0x40B3',
-    'EventUsrSlaveDeviceTrimVolumeDown': '0x40B4',
-    'EventUsrEstablishPeerConnection': '0x40B5',
-    'EventUsrTwsQualificationEnablePeerOpen': '0x40B7',
-    'EventUsrBleStartBonding': '0x40D0',
-    'EventUsrBleSwitchPeripheral': '0x40D1',
-    'EventUsrBleSwitchCentral': '0x40D2',
-    'EventUsrImmAlertStop': '0x40D3',
-    'EventUsrLlsAlertStop': '0x40D4',
-    'EventUsrFindMyRemoteImmAlertMild': '0x40D5',
-    'EventUsrFindMyRemoteImmAlertHigh': '0x40D6',
-    'EventUsrFindMyPhoneImmAlertMild': '0x40D7',
-    'EventUsrFindMyPhoneImmAlertHigh': '0x40D8',
-    'EventUsrFindMyPhoneRemoteImmAlertHigh': '0x40D9',
-    'EventUsrPartyModeOn': '0x40DA',
-    'EventUsrPartyModeOff': '0x40DB',
-    'EventUsrBleDeleteDevice': '0x40EC',
-    'EventUsrAvrcpNextGroupRelease': '0x40ED',
-    'EventUsrAvrcpPreviousGroupRelease': '0x40EE',
-    'EventUsrTwsQualificationVolUp': '0x40EF',
-    'EventUsrTwsQualificationVolDown': '0x40F0',
-    'EventUsrStartA2DPStream': '0x40F1',
-    'EventUsrPbapBrowseListByName': '0x40F2',
-    'EventUsrTwsQualificationSetAbsVolume': '0x40F3',
-    'EventUsrTwsQualificationPlayTrack': '0x40F4',
-    'EventUsrBleHidExtraConfig': '0x40F5',
-    'EventUsrTwsQualificationAVRCPConfigureDataSize': '0x40F6',
-}
diff --git a/src/antlion/controllers/buds_lib/dev_utils/proto/Makefile b/src/antlion/controllers/buds_lib/dev_utils/proto/Makefile
deleted file mode 100644
index 8509cd6..0000000
--- a/src/antlion/controllers/buds_lib/dev_utils/proto/Makefile
+++ /dev/null
@@ -1,4 +0,0 @@
-all: nanopb_pb2.py plugin_pb2.py
-
-%_pb2.py: %.proto
-	protoc -I. --python_out=. $<
diff --git a/src/antlion/controllers/buds_lib/dev_utils/proto/apollo_qa.proto b/src/antlion/controllers/buds_lib/dev_utils/proto/apollo_qa.proto
deleted file mode 100644
index 0db32e9..0000000
--- a/src/antlion/controllers/buds_lib/dev_utils/proto/apollo_qa.proto
+++ /dev/null
@@ -1,175 +0,0 @@
-/* Forward & backward compatibility practices must be followed.  This means:
-   a) Never re-use an enum if the semantics are different.
-   b) Never alter the semantic meaning.  If needed, simply deprecate
-      old message/field/enum & start using new one.  If deprecated message
-      is no longer used in code make sure to communicate this to QA.
-   c) Prefer 3-stage migrations if possible: add new message/enum/field & start
-      sending it, make sure QA has switched to new message/enum/field, remove old
-      message/enum/field.
-   Fields should start out required unless they are optional in the code.  They should
-   become optional when deprecated (make sure to update QA automation first to expect the
-   field to be optional) or removed.  Make sure to never re-use field ids unless
-   the field is being resurrected.
- */
-syntax = "proto2";
-//package audiowear;
-package apollo.lib.apollo_dev_util_lib.proto;
-import "nanopb.proto";
-//import "third_party/nanopb/nanopb.proto";
-
-
-option java_package = "com.google.android.bisto.nano";
-option java_outer_classname = "ApolloQA";
-
-enum ApolloQAMessageType {
-  TRACE = 1;
-  GET_VER_RESPONSE = 2;
-  GET_CODEC_RESPONSE = 3;
-  GET_DSP_STATUS_RESPONSE = 4;
-  FACTORY_PLAY_SOUND = 5;
-  FACTORY_INFO_REQUEST = 6;
-  FACTORY_INFO_RESPONSE = 7;
-}
-enum TraceId {
-  // data[0] = partition# erased.
-  OTA_ERASE_PARTITION = 1;
-  // data[0] = partition# we will write to.
-  // data[1] = expected length we will write to partition
-  OTA_START_PARTITION_WRITE = 2;
-  // data[0] = partition# written
-  // data[1] = actual written length
-  OTA_FINISHED_PARTITION_WRITE = 3;
-  // our custom signature validation has begun
-  OTA_SIGNATURE_START = 4;
-  // our custom signature validation rejected the image
-  OTA_SIGNATURE_FAILURE = 5;
-  // triggering CSR to reboot & apply DFU
-  OTA_TRIGGERING_LOADER = 6;
-  // the CSR loader rejected the image
-  OTA_LOADER_VERIFY_FAILED = 7;
-  // progress update.
-  // data[0] = num bytes received
-  // data[1] = num bytes expected
-  OTA_PROGRESS = 8;
-  OTA_ABORTED = 9;
-  // up to 10: reserved for OTA
-  // data[0] = AvrcpPlayStatus as integer
-  AVRCP_PLAY_STATUS_CHANGE = 10;
-  /* data[0] = new volume level (int16 stored in uint32)
-     data[1] = new volume level in dB (int16 stored in uint32)
-     data[2] = previous volume level (int16 stored in uint32)
-     easy conversion in python:
-       new_volume = ctpyes.c_short(ctypes.c_uint(data[0]).value).value
-       new_volume_db = ctpyes.c_short(ctypes.c_uint(data[1]).value).value
-  */
-  VOLUME_CHANGE = 11;
-  /* data[0] = entry number of command */
-  COMMANDER_RECV_COMMAND = 12;
-  COMMANDER_FINISH_COMMAND = 13;
-  /* currently no information about the rejected command */
-  COMMANDER_REJECT_COMMAND = 14;
-}
-/* Note: FWD_SEEK & REV_SEEK are bit-wise or'ed into the status.
-   Use SEEK_MASK first to read the seek or mask it out to get the other
-   states).  Do not & with individual states for comparison as aside from
-   seek the other states are not a bitwise-mask.
-*/
-enum AvrcpPlayStatus {
-  STOPPED = 0;
-  PLAYING = 1;
-  PAUSED = 2;
-  FWD_SEEK = 8;
-  REV_SEEK = 16;
-  ERROR = 5;
-  /* & this with the status to compare against FWD_SEEK/REV_SEEK.
-     & with the complement of this value to get the other states */
-  SEEK_MASK = 24;
-}
-/* These messages are internal trace-points for QA to do whitebox validation.
-   However, developers should feel free to remove trace-points if they
-   no longer make sense (but communicate to QA to prep this ahead-of-time). */
-message ApolloQATrace {
-  // all messages should have a timestamp field and it will get auto-populated
-  // (no need to set it at the call-site).
-  required uint32 timestamp = 1;
-  // this is the event that occured internally
-  required TraceId id = 2;
-  // this is some optional data that depends on the traceid.
-  // if not documented then no fields will be written.
-  repeated uint32 data = 3 [packed = true, (nanopb).max_count = 5];
-}
-enum PreviousBootStatus {
-  OTA_SUCCESS = 1; /* previous boot OK */
-  OTA_ERROR = 2; /* previous OTA failed */
-}
-//Next ID: 10
-message ApolloQAGetVerResponse {
-  required uint32 timestamp = 1;
-  required uint32 csr_fw_version = 2; // not implemented for now
-  required bool csr_fw_debug_build = 3; // not implemented for now
-  required uint32 vm_build_number = 4;
-  required bool vm_debug_build = 5;
-  required uint32 psoc_version = 6;
-  // the build label sent to AGSA. This should just be an amalgum of the broken-down
-  // info above. Aside from X.Y.Z prefix, do not parse this for anything as it is
-  // free-form text.
-  required string build_label = 7 [(nanopb).max_size = 32];
-  // if not set then info wasn't available.
-  optional PreviousBootStatus last_ota_status = 8;
-  required uint32 charger_version = 9;
-  optional uint32 expected_psoc_version = 10;
-}
-enum ApolloQAA2dpCodec {
-  AAC = 1;
-  SBC = 2;
-}
-message ApolloQAGetCodecResponse {
-  required uint32 timestamp = 1;
-  // if not set then unknown codec (error).
-  optional ApolloQAA2dpCodec codec = 2;
-}
-enum SinkState {
-  LIMBO = 0;
-  CONNECTABLE = 1;
-  DISCOVERABLE = 2;
-  CONNECTED = 3;
-  OUTGOING_CALLS_ESTABLISH = 4;
-  INCOMING_CALLS_ESTABLISH = 5;
-  ACTIVE_CALL_SCO = 6;
-  TEST_MODE = 7;
-  THREE_WAY_CALL_WAITING = 8;
-  THREE_WAY_CALL_ON_HOLD = 9;
-  THREE_WAY_MULTICALL = 10;
-  INCOMING_CALL_ON_HOLD = 11;
-  ACTIVE_CALL_NO_SCO = 12;
-  A2DP_STREAMING = 13;
-  DEVICE_LOW_BATTERY = 14;
-}
-message ApolloQAGetDspStatusResponse {
-  required uint32 timestamp = 1;
-  required bool is_dsp_loaded = 2;
-  required SinkState sink_state = 3;
-  /**
-   * This is a bit-wise mask.
-   * see AudioWearFeatureBits audiowear_plugin_state.h
-   * for the definition.
-   */
-  required uint32 features_active = 4;
-}
-message ApolloQAFactoryPlaySound {
-  enum PromptType {
-    PROMPT_TYPE_BT_CONNECTED = 1;
-    PROMPT_TYPE_IN_EAR = 2;
-  }
-  required PromptType prompt = 1;
-}
-message ApolloQAFactoryInfoRequest {
-}
-message ApolloQAFactoryInfoResponse {
-  required uint32 timestamp = 1;
-  optional int32 crystal_trim = 2 [(nanopb).int_size = IS_16];
-  optional bool crash_dump_exists = 3;
-  optional bool is_developer_mode_enabled = 4;
-  optional bool is_always_connected = 5;
-  optional uint32 hwid = 6;
-}
\ No newline at end of file
diff --git a/src/antlion/controllers/buds_lib/dev_utils/proto/audiowear.proto b/src/antlion/controllers/buds_lib/dev_utils/proto/audiowear.proto
deleted file mode 100644
index 8f825bd..0000000
--- a/src/antlion/controllers/buds_lib/dev_utils/proto/audiowear.proto
+++ /dev/null
@@ -1,25 +0,0 @@
-syntax = "proto2";
-
-//package audiowear;
-package apollo.lib.apollo_dev_util_lib.proto;
-
-option java_package = "com.google.android.bisto.nano";
-option java_outer_classname = "Protocol";
-
-enum MessageGroup {
-    UNKNOWN_MESSAGE_GROUP = 0;
-    DEVICE_INPUT = 1;
-    OTA = 2;
-    DEVICE_CAPABILITY = 3;
-    DEVICE_STATUS = 4;
-    LOGGING = 5;
-    SENSORS = 6;
-    COMPANION_STATUS = 7;
-    DEVICE_COMMAND = 8;
-    BISTO_SETTINGS = 9;
-    WELLNESS = 10;
-    TEST = 11;
-    BLE_SERVICE = 12;
-    APOLLO_QA = 126;
-    TRANSLATE = 127;
-}
\ No newline at end of file
diff --git a/src/antlion/controllers/buds_lib/dev_utils/proto/gen/apollo_qa_pb2.py b/src/antlion/controllers/buds_lib/dev_utils/proto/gen/apollo_qa_pb2.py
deleted file mode 100644
index fefcfe4..0000000
--- a/src/antlion/controllers/buds_lib/dev_utils/proto/gen/apollo_qa_pb2.py
+++ /dev/null
@@ -1,59 +0,0 @@
-# -*- coding: utf-8 -*-
-# Generated by the protocol buffer compiler.  DO NOT EDIT!
-# source: apollo_qa.proto
-"""Generated protocol buffer code."""
-from google.protobuf.internal import builder as _builder
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import descriptor_pool as _descriptor_pool
-from google.protobuf import symbol_database as _symbol_database
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-import nanopb_pb2 as nanopb__pb2
-
-
-DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0f\x61pollo_qa.proto\x12$apollo.lib.apollo_dev_util_lib.proto\x1a\x0cnanopb.proto\"t\n\rApolloQATrace\x12\x11\n\ttimestamp\x18\x01 \x02(\r\x12\x39\n\x02id\x18\x02 \x02(\x0e\x32-.apollo.lib.apollo_dev_util_lib.proto.TraceId\x12\x15\n\x04\x64\x61ta\x18\x03 \x03(\rB\x07\x10\x01\x92?\x02\x10\x05\"\xcd\x02\n\x16\x41polloQAGetVerResponse\x12\x11\n\ttimestamp\x18\x01 \x02(\r\x12\x16\n\x0e\x63sr_fw_version\x18\x02 \x02(\r\x12\x1a\n\x12\x63sr_fw_debug_build\x18\x03 \x02(\x08\x12\x17\n\x0fvm_build_number\x18\x04 \x02(\r\x12\x16\n\x0evm_debug_build\x18\x05 \x02(\x08\x12\x14\n\x0cpsoc_version\x18\x06 \x02(\r\x12\x1a\n\x0b\x62uild_label\x18\x07 \x02(\tB\x05\x92?\x02\x08 \x12Q\n\x0flast_ota_status\x18\x08 \x01(\x0e\x32\x38.apollo.lib.apollo_dev_util_lib.proto.PreviousBootStatus\x12\x17\n\x0f\x63harger_version\x18\t \x02(\r\x12\x1d\n\x15\x65xpected_psoc_version\x18\n \x01(\r\"u\n\x18\x41polloQAGetCodecResponse\x12\x11\n\ttimestamp\x18\x01 \x02(\r\x12\x46\n\x05\x63odec\x18\x02 \x01(\x0e\x32\x37.apollo.lib.apollo_dev_util_lib.proto.ApolloQAA2dpCodec\"\xa6\x01\n\x1c\x41polloQAGetDspStatusResponse\x12\x11\n\ttimestamp\x18\x01 \x02(\r\x12\x15\n\ris_dsp_loaded\x18\x02 \x02(\x08\x12\x43\n\nsink_state\x18\x03 \x02(\x0e\x32/.apollo.lib.apollo_dev_util_lib.proto.SinkState\x12\x17\n\x0f\x66\x65\x61tures_active\x18\x04 \x02(\r\"\xb9\x01\n\x18\x41polloQAFactoryPlaySound\x12Y\n\x06prompt\x18\x01 \x02(\x0e\x32I.apollo.lib.apollo_dev_util_lib.proto.ApolloQAFactoryPlaySound.PromptType\"B\n\nPromptType\x12\x1c\n\x18PROMPT_TYPE_BT_CONNECTED\x10\x01\x12\x16\n\x12PROMPT_TYPE_IN_EAR\x10\x02\"\x1c\n\x1a\x41polloQAFactoryInfoRequest\"\xb6\x01\n\x1b\x41polloQAFactoryInfoResponse\x12\x11\n\ttimestamp\x18\x01 \x02(\r\x12\x1b\n\x0c\x63rystal_trim\x18\x02 \x01(\x05\x42\x05\x92?\x02\x38\x10\x12\x19\n\x11\x63rash_dump_exists\x18\x03 \x01(\x08\x12!\n\x19is_developer_mode_enabled\x18\x04 \x01(\x08\x12\x1b\n\x13is_always_connected\x18\x05 
\x01(\x08\x12\x0c\n\x04hwid\x18\x06 \x01(\r*\xb8\x01\n\x13\x41polloQAMessageType\x12\t\n\x05TRACE\x10\x01\x12\x14\n\x10GET_VER_RESPONSE\x10\x02\x12\x16\n\x12GET_CODEC_RESPONSE\x10\x03\x12\x1b\n\x17GET_DSP_STATUS_RESPONSE\x10\x04\x12\x16\n\x12\x46\x41\x43TORY_PLAY_SOUND\x10\x05\x12\x18\n\x14\x46\x41\x43TORY_INFO_REQUEST\x10\x06\x12\x19\n\x15\x46\x41\x43TORY_INFO_RESPONSE\x10\x07*\xfc\x02\n\x07TraceId\x12\x17\n\x13OTA_ERASE_PARTITION\x10\x01\x12\x1d\n\x19OTA_START_PARTITION_WRITE\x10\x02\x12 \n\x1cOTA_FINISHED_PARTITION_WRITE\x10\x03\x12\x17\n\x13OTA_SIGNATURE_START\x10\x04\x12\x19\n\x15OTA_SIGNATURE_FAILURE\x10\x05\x12\x19\n\x15OTA_TRIGGERING_LOADER\x10\x06\x12\x1c\n\x18OTA_LOADER_VERIFY_FAILED\x10\x07\x12\x10\n\x0cOTA_PROGRESS\x10\x08\x12\x0f\n\x0bOTA_ABORTED\x10\t\x12\x1c\n\x18\x41VRCP_PLAY_STATUS_CHANGE\x10\n\x12\x11\n\rVOLUME_CHANGE\x10\x0b\x12\x1a\n\x16\x43OMMANDER_RECV_COMMAND\x10\x0c\x12\x1c\n\x18\x43OMMANDER_FINISH_COMMAND\x10\r\x12\x1c\n\x18\x43OMMANDER_REJECT_COMMAND\x10\x0e*m\n\x0f\x41vrcpPlayStatus\x12\x0b\n\x07STOPPED\x10\x00\x12\x0b\n\x07PLAYING\x10\x01\x12\n\n\x06PAUSED\x10\x02\x12\x0c\n\x08\x46WD_SEEK\x10\x08\x12\x0c\n\x08REV_SEEK\x10\x10\x12\t\n\x05\x45RROR\x10\x05\x12\r\n\tSEEK_MASK\x10\x18*4\n\x12PreviousBootStatus\x12\x0f\n\x0bOTA_SUCCESS\x10\x01\x12\r\n\tOTA_ERROR\x10\x02*%\n\x11\x41polloQAA2dpCodec\x12\x07\n\x03\x41\x41\x43\x10\x01\x12\x07\n\x03SBC\x10\x02*\xd8\x02\n\tSinkState\x12\t\n\x05LIMBO\x10\x00\x12\x0f\n\x0b\x43ONNECTABLE\x10\x01\x12\x10\n\x0c\x44ISCOVERABLE\x10\x02\x12\r\n\tCONNECTED\x10\x03\x12\x1c\n\x18OUTGOING_CALLS_ESTABLISH\x10\x04\x12\x1c\n\x18INCOMING_CALLS_ESTABLISH\x10\x05\x12\x13\n\x0f\x41\x43TIVE_CALL_SCO\x10\x06\x12\r\n\tTEST_MODE\x10\x07\x12\x1a\n\x16THREE_WAY_CALL_WAITING\x10\x08\x12\x1a\n\x16THREE_WAY_CALL_ON_HOLD\x10\t\x12\x17\n\x13THREE_WAY_MULTICALL\x10\n\x12\x19\n\x15INCOMING_CALL_ON_HOLD\x10\x0b\x12\x16\n\x12\x41\x43TIVE_CALL_NO_SCO\x10\x0c\x12\x12\n\x0e\x41\x32\x44P_STREAMING\x10\r\x12\x16\n\x12\x44\x45VICE_LOW_BATT
ERY\x10\x0e\x42)\n\x1d\x63om.google.android.bisto.nanoB\x08\x41polloQA')
-
-_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
-_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'apollo_qa_pb2', globals())
-if _descriptor._USE_C_DESCRIPTORS == False:
-
-  DESCRIPTOR._options = None
-  DESCRIPTOR._serialized_options = b'\n\035com.google.android.bisto.nanoB\010ApolloQA'
-  _APOLLOQATRACE.fields_by_name['data']._options = None
-  _APOLLOQATRACE.fields_by_name['data']._serialized_options = b'\020\001\222?\002\020\005'
-  _APOLLOQAGETVERRESPONSE.fields_by_name['build_label']._options = None
-  _APOLLOQAGETVERRESPONSE.fields_by_name['build_label']._serialized_options = b'\222?\002\010 '
-  _APOLLOQAFACTORYINFORESPONSE.fields_by_name['crystal_trim']._options = None
-  _APOLLOQAFACTORYINFORESPONSE.fields_by_name['crystal_trim']._serialized_options = b'\222?\0028\020'
-  _APOLLOQAMESSAGETYPE._serialized_start=1217
-  _APOLLOQAMESSAGETYPE._serialized_end=1401
-  _TRACEID._serialized_start=1404
-  _TRACEID._serialized_end=1784
-  _AVRCPPLAYSTATUS._serialized_start=1786
-  _AVRCPPLAYSTATUS._serialized_end=1895
-  _PREVIOUSBOOTSTATUS._serialized_start=1897
-  _PREVIOUSBOOTSTATUS._serialized_end=1949
-  _APOLLOQAA2DPCODEC._serialized_start=1951
-  _APOLLOQAA2DPCODEC._serialized_end=1988
-  _SINKSTATE._serialized_start=1991
-  _SINKSTATE._serialized_end=2335
-  _APOLLOQATRACE._serialized_start=71
-  _APOLLOQATRACE._serialized_end=187
-  _APOLLOQAGETVERRESPONSE._serialized_start=190
-  _APOLLOQAGETVERRESPONSE._serialized_end=523
-  _APOLLOQAGETCODECRESPONSE._serialized_start=525
-  _APOLLOQAGETCODECRESPONSE._serialized_end=642
-  _APOLLOQAGETDSPSTATUSRESPONSE._serialized_start=645
-  _APOLLOQAGETDSPSTATUSRESPONSE._serialized_end=811
-  _APOLLOQAFACTORYPLAYSOUND._serialized_start=814
-  _APOLLOQAFACTORYPLAYSOUND._serialized_end=999
-  _APOLLOQAFACTORYPLAYSOUND_PROMPTTYPE._serialized_start=933
-  _APOLLOQAFACTORYPLAYSOUND_PROMPTTYPE._serialized_end=999
-  _APOLLOQAFACTORYINFOREQUEST._serialized_start=1001
-  _APOLLOQAFACTORYINFOREQUEST._serialized_end=1029
-  _APOLLOQAFACTORYINFORESPONSE._serialized_start=1032
-  _APOLLOQAFACTORYINFORESPONSE._serialized_end=1214
-# @@protoc_insertion_point(module_scope)
diff --git a/src/antlion/controllers/buds_lib/dev_utils/proto/gen/audiowear_pb2.py b/src/antlion/controllers/buds_lib/dev_utils/proto/gen/audiowear_pb2.py
deleted file mode 100644
index 123a079..0000000
--- a/src/antlion/controllers/buds_lib/dev_utils/proto/gen/audiowear_pb2.py
+++ /dev/null
@@ -1,26 +0,0 @@
-# -*- coding: utf-8 -*-
-# Generated by the protocol buffer compiler.  DO NOT EDIT!
-# source: audiowear.proto
-"""Generated protocol buffer code."""
-from google.protobuf.internal import builder as _builder
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import descriptor_pool as _descriptor_pool
-from google.protobuf import symbol_database as _symbol_database
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-
-
-DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0f\x61udiowear.proto\x12$apollo.lib.apollo_dev_util_lib.proto*\x8d\x02\n\x0cMessageGroup\x12\x19\n\x15UNKNOWN_MESSAGE_GROUP\x10\x00\x12\x10\n\x0c\x44\x45VICE_INPUT\x10\x01\x12\x07\n\x03OTA\x10\x02\x12\x15\n\x11\x44\x45VICE_CAPABILITY\x10\x03\x12\x11\n\rDEVICE_STATUS\x10\x04\x12\x0b\n\x07LOGGING\x10\x05\x12\x0b\n\x07SENSORS\x10\x06\x12\x14\n\x10\x43OMPANION_STATUS\x10\x07\x12\x12\n\x0e\x44\x45VICE_COMMAND\x10\x08\x12\x12\n\x0e\x42ISTO_SETTINGS\x10\t\x12\x0c\n\x08WELLNESS\x10\n\x12\x08\n\x04TEST\x10\x0b\x12\x0f\n\x0b\x42LE_SERVICE\x10\x0c\x12\r\n\tAPOLLO_QA\x10~\x12\r\n\tTRANSLATE\x10\x7f\x42)\n\x1d\x63om.google.android.bisto.nanoB\x08Protocol')
-
-_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
-_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'audiowear_pb2', globals())
-if _descriptor._USE_C_DESCRIPTORS == False:
-
-  DESCRIPTOR._options = None
-  DESCRIPTOR._serialized_options = b'\n\035com.google.android.bisto.nanoB\010Protocol'
-  _MESSAGEGROUP._serialized_start=58
-  _MESSAGEGROUP._serialized_end=327
-# @@protoc_insertion_point(module_scope)
diff --git a/src/antlion/controllers/buds_lib/dev_utils/proto/gen/google/protobuf/descriptor_pb2.py b/src/antlion/controllers/buds_lib/dev_utils/proto/gen/google/protobuf/descriptor_pb2.py
deleted file mode 100644
index dd9775c..0000000
--- a/src/antlion/controllers/buds_lib/dev_utils/proto/gen/google/protobuf/descriptor_pb2.py
+++ /dev/null
@@ -1,1482 +0,0 @@
-# -*- coding: utf-8 -*-
-# Generated by the protocol buffer compiler.  DO NOT EDIT!
-# source: google/protobuf/descriptor.proto
-"""Generated protocol buffer code."""
-from google.protobuf.internal import builder as _builder
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import descriptor_pool as _descriptor_pool
-from google.protobuf import symbol_database as _symbol_database
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-
-
-if _descriptor._USE_C_DESCRIPTORS == False:
-  DESCRIPTOR = _descriptor.FileDescriptor(
-    name='google/protobuf/descriptor.proto',
-    package='google.protobuf',
-    syntax='proto2',
-    serialized_options=None,
-    create_key=_descriptor._internal_create_key,
-    serialized_pb=b'\n google/protobuf/descriptor.proto\x12\x0fgoogle.protobuf\"G\n\x11\x46ileDescriptorSet\x12\x32\n\x04\x66ile\x18\x01 \x03(\x0b\x32$.google.protobuf.FileDescriptorProto\"\xdb\x03\n\x13\x46ileDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07package\x18\x02 \x01(\t\x12\x12\n\ndependency\x18\x03 \x03(\t\x12\x19\n\x11public_dependency\x18\n \x03(\x05\x12\x17\n\x0fweak_dependency\x18\x0b \x03(\x05\x12\x36\n\x0cmessage_type\x18\x04 \x03(\x0b\x32 .google.protobuf.DescriptorProto\x12\x37\n\tenum_type\x18\x05 \x03(\x0b\x32$.google.protobuf.EnumDescriptorProto\x12\x38\n\x07service\x18\x06 \x03(\x0b\x32\'.google.protobuf.ServiceDescriptorProto\x12\x38\n\textension\x18\x07 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProto\x12-\n\x07options\x18\x08 \x01(\x0b\x32\x1c.google.protobuf.FileOptions\x12\x39\n\x10source_code_info\x18\t \x01(\x0b\x32\x1f.google.protobuf.SourceCodeInfo\x12\x0e\n\x06syntax\x18\x0c \x01(\t\"\xe4\x03\n\x0f\x44\x65scriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x34\n\x05\x66ield\x18\x02 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProto\x12\x38\n\textension\x18\x06 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProto\x12\x35\n\x0bnested_type\x18\x03 \x03(\x0b\x32 .google.protobuf.DescriptorProto\x12\x37\n\tenum_type\x18\x04 \x03(\x0b\x32$.google.protobuf.EnumDescriptorProto\x12H\n\x0f\x65xtension_range\x18\x05 \x03(\x0b\x32/.google.protobuf.DescriptorProto.ExtensionRange\x12\x39\n\noneof_decl\x18\x08 \x03(\x0b\x32%.google.protobuf.OneofDescriptorProto\x12\x30\n\x07options\x18\x07 \x01(\x0b\x32\x1f.google.protobuf.MessageOptions\x1a,\n\x0e\x45xtensionRange\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x05\"\xa9\x05\n\x14\x46ieldDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x03 \x01(\x05\x12:\n\x05label\x18\x04 \x01(\x0e\x32+.google.protobuf.FieldDescriptorProto.Label\x12\x38\n\x04type\x18\x05 
\x01(\x0e\x32*.google.protobuf.FieldDescriptorProto.Type\x12\x11\n\ttype_name\x18\x06 \x01(\t\x12\x10\n\x08\x65xtendee\x18\x02 \x01(\t\x12\x15\n\rdefault_value\x18\x07 \x01(\t\x12\x13\n\x0boneof_index\x18\t \x01(\x05\x12.\n\x07options\x18\x08 \x01(\x0b\x32\x1d.google.protobuf.FieldOptions\"\xb6\x02\n\x04Type\x12\x0f\n\x0bTYPE_DOUBLE\x10\x01\x12\x0e\n\nTYPE_FLOAT\x10\x02\x12\x0e\n\nTYPE_INT64\x10\x03\x12\x0f\n\x0bTYPE_UINT64\x10\x04\x12\x0e\n\nTYPE_INT32\x10\x05\x12\x10\n\x0cTYPE_FIXED64\x10\x06\x12\x10\n\x0cTYPE_FIXED32\x10\x07\x12\r\n\tTYPE_BOOL\x10\x08\x12\x0f\n\x0bTYPE_STRING\x10\t\x12\x0e\n\nTYPE_GROUP\x10\n\x12\x10\n\x0cTYPE_MESSAGE\x10\x0b\x12\x0e\n\nTYPE_BYTES\x10\x0c\x12\x0f\n\x0bTYPE_UINT32\x10\r\x12\r\n\tTYPE_ENUM\x10\x0e\x12\x11\n\rTYPE_SFIXED32\x10\x0f\x12\x11\n\rTYPE_SFIXED64\x10\x10\x12\x0f\n\x0bTYPE_SINT32\x10\x11\x12\x0f\n\x0bTYPE_SINT64\x10\x12\"C\n\x05Label\x12\x12\n\x0eLABEL_OPTIONAL\x10\x01\x12\x12\n\x0eLABEL_REQUIRED\x10\x02\x12\x12\n\x0eLABEL_REPEATED\x10\x03\"$\n\x14OneofDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\"\x8c\x01\n\x13\x45numDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x38\n\x05value\x18\x02 \x03(\x0b\x32).google.protobuf.EnumValueDescriptorProto\x12-\n\x07options\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.EnumOptions\"l\n\x18\x45numValueDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x02 \x01(\x05\x12\x32\n\x07options\x18\x03 \x01(\x0b\x32!.google.protobuf.EnumValueOptions\"\x90\x01\n\x16ServiceDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x36\n\x06method\x18\x02 \x03(\x0b\x32&.google.protobuf.MethodDescriptorProto\x12\x30\n\x07options\x18\x03 \x01(\x0b\x32\x1f.google.protobuf.ServiceOptions\"\xc1\x01\n\x15MethodDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x12\n\ninput_type\x18\x02 \x01(\t\x12\x13\n\x0boutput_type\x18\x03 \x01(\t\x12/\n\x07options\x18\x04 \x01(\x0b\x32\x1e.google.protobuf.MethodOptions\x12\x1f\n\x10\x63lient_streaming\x18\x05 
\x01(\x08:\x05\x66\x61lse\x12\x1f\n\x10server_streaming\x18\x06 \x01(\x08:\x05\x66\x61lse\"\xcc\x04\n\x0b\x46ileOptions\x12\x14\n\x0cjava_package\x18\x01 \x01(\t\x12\x1c\n\x14java_outer_classname\x18\x08 \x01(\t\x12\"\n\x13java_multiple_files\x18\n \x01(\x08:\x05\x66\x61lse\x12,\n\x1djava_generate_equals_and_hash\x18\x14 \x01(\x08:\x05\x66\x61lse\x12%\n\x16java_string_check_utf8\x18\x1b \x01(\x08:\x05\x66\x61lse\x12\x46\n\x0coptimize_for\x18\t \x01(\x0e\x32).google.protobuf.FileOptions.OptimizeMode:\x05SPEED\x12\x12\n\ngo_package\x18\x0b \x01(\t\x12\"\n\x13\x63\x63_generic_services\x18\x10 \x01(\x08:\x05\x66\x61lse\x12$\n\x15java_generic_services\x18\x11 \x01(\x08:\x05\x66\x61lse\x12\"\n\x13py_generic_services\x18\x12 \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x17 \x01(\x08:\x05\x66\x61lse\x12\x1f\n\x10\x63\x63_enable_arenas\x18\x1f \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption\":\n\x0cOptimizeMode\x12\t\n\x05SPEED\x10\x01\x12\r\n\tCODE_SIZE\x10\x02\x12\x10\n\x0cLITE_RUNTIME\x10\x03*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xe6\x01\n\x0eMessageOptions\x12&\n\x17message_set_wire_format\x18\x01 \x01(\x08:\x05\x66\x61lse\x12.\n\x1fno_standard_descriptor_accessor\x18\x02 \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x11\n\tmap_entry\x18\x07 \x01(\x08\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xa0\x02\n\x0c\x46ieldOptions\x12:\n\x05\x63type\x18\x01 \x01(\x0e\x32#.google.protobuf.FieldOptions.CType:\x06STRING\x12\x0e\n\x06packed\x18\x02 \x01(\x08\x12\x13\n\x04lazy\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x13\n\x04weak\x18\n \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 
\x03(\x0b\x32$.google.protobuf.UninterpretedOption\"/\n\x05\x43Type\x12\n\n\x06STRING\x10\x00\x12\x08\n\x04\x43ORD\x10\x01\x12\x10\n\x0cSTRING_PIECE\x10\x02*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x8d\x01\n\x0b\x45numOptions\x12\x13\n\x0b\x61llow_alias\x18\x02 \x01(\x08\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"}\n\x10\x45numValueOptions\x12\x19\n\ndeprecated\x18\x01 \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"{\n\x0eServiceOptions\x12\x19\n\ndeprecated\x18! \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"z\n\rMethodOptions\x12\x19\n\ndeprecated\x18! \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x9e\x02\n\x13UninterpretedOption\x12;\n\x04name\x18\x02 \x03(\x0b\x32-.google.protobuf.UninterpretedOption.NamePart\x12\x18\n\x10identifier_value\x18\x03 \x01(\t\x12\x1a\n\x12positive_int_value\x18\x04 \x01(\x04\x12\x1a\n\x12negative_int_value\x18\x05 \x01(\x03\x12\x14\n\x0c\x64ouble_value\x18\x06 \x01(\x01\x12\x14\n\x0cstring_value\x18\x07 \x01(\x0c\x12\x17\n\x0f\x61ggregate_value\x18\x08 \x01(\t\x1a\x33\n\x08NamePart\x12\x11\n\tname_part\x18\x01 \x02(\t\x12\x14\n\x0cis_extension\x18\x02 \x02(\x08\"\xb1\x01\n\x0eSourceCodeInfo\x12:\n\x08location\x18\x01 \x03(\x0b\x32(.google.protobuf.SourceCodeInfo.Location\x1a\x63\n\x08Location\x12\x10\n\x04path\x18\x01 \x03(\x05\x42\x02\x10\x01\x12\x10\n\x04span\x18\x02 \x03(\x05\x42\x02\x10\x01\x12\x18\n\x10leading_comments\x18\x03 \x01(\t\x12\x19\n\x11trailing_comments\x18\x04 
\x01(\tB)\n\x13\x63om.google.protobufB\x10\x44\x65scriptorProtosH\x01'
-  )
-else:
-  DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n google/protobuf/descriptor.proto\x12\x0fgoogle.protobuf\"G\n\x11\x46ileDescriptorSet\x12\x32\n\x04\x66ile\x18\x01 \x03(\x0b\x32$.google.protobuf.FileDescriptorProto\"\xdb\x03\n\x13\x46ileDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07package\x18\x02 \x01(\t\x12\x12\n\ndependency\x18\x03 \x03(\t\x12\x19\n\x11public_dependency\x18\n \x03(\x05\x12\x17\n\x0fweak_dependency\x18\x0b \x03(\x05\x12\x36\n\x0cmessage_type\x18\x04 \x03(\x0b\x32 .google.protobuf.DescriptorProto\x12\x37\n\tenum_type\x18\x05 \x03(\x0b\x32$.google.protobuf.EnumDescriptorProto\x12\x38\n\x07service\x18\x06 \x03(\x0b\x32\'.google.protobuf.ServiceDescriptorProto\x12\x38\n\textension\x18\x07 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProto\x12-\n\x07options\x18\x08 \x01(\x0b\x32\x1c.google.protobuf.FileOptions\x12\x39\n\x10source_code_info\x18\t \x01(\x0b\x32\x1f.google.protobuf.SourceCodeInfo\x12\x0e\n\x06syntax\x18\x0c \x01(\t\"\xe4\x03\n\x0f\x44\x65scriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x34\n\x05\x66ield\x18\x02 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProto\x12\x38\n\textension\x18\x06 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProto\x12\x35\n\x0bnested_type\x18\x03 \x03(\x0b\x32 .google.protobuf.DescriptorProto\x12\x37\n\tenum_type\x18\x04 \x03(\x0b\x32$.google.protobuf.EnumDescriptorProto\x12H\n\x0f\x65xtension_range\x18\x05 \x03(\x0b\x32/.google.protobuf.DescriptorProto.ExtensionRange\x12\x39\n\noneof_decl\x18\x08 \x03(\x0b\x32%.google.protobuf.OneofDescriptorProto\x12\x30\n\x07options\x18\x07 \x01(\x0b\x32\x1f.google.protobuf.MessageOptions\x1a,\n\x0e\x45xtensionRange\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x05\"\xa9\x05\n\x14\x46ieldDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x03 \x01(\x05\x12:\n\x05label\x18\x04 \x01(\x0e\x32+.google.protobuf.FieldDescriptorProto.Label\x12\x38\n\x04type\x18\x05 
\x01(\x0e\x32*.google.protobuf.FieldDescriptorProto.Type\x12\x11\n\ttype_name\x18\x06 \x01(\t\x12\x10\n\x08\x65xtendee\x18\x02 \x01(\t\x12\x15\n\rdefault_value\x18\x07 \x01(\t\x12\x13\n\x0boneof_index\x18\t \x01(\x05\x12.\n\x07options\x18\x08 \x01(\x0b\x32\x1d.google.protobuf.FieldOptions\"\xb6\x02\n\x04Type\x12\x0f\n\x0bTYPE_DOUBLE\x10\x01\x12\x0e\n\nTYPE_FLOAT\x10\x02\x12\x0e\n\nTYPE_INT64\x10\x03\x12\x0f\n\x0bTYPE_UINT64\x10\x04\x12\x0e\n\nTYPE_INT32\x10\x05\x12\x10\n\x0cTYPE_FIXED64\x10\x06\x12\x10\n\x0cTYPE_FIXED32\x10\x07\x12\r\n\tTYPE_BOOL\x10\x08\x12\x0f\n\x0bTYPE_STRING\x10\t\x12\x0e\n\nTYPE_GROUP\x10\n\x12\x10\n\x0cTYPE_MESSAGE\x10\x0b\x12\x0e\n\nTYPE_BYTES\x10\x0c\x12\x0f\n\x0bTYPE_UINT32\x10\r\x12\r\n\tTYPE_ENUM\x10\x0e\x12\x11\n\rTYPE_SFIXED32\x10\x0f\x12\x11\n\rTYPE_SFIXED64\x10\x10\x12\x0f\n\x0bTYPE_SINT32\x10\x11\x12\x0f\n\x0bTYPE_SINT64\x10\x12\"C\n\x05Label\x12\x12\n\x0eLABEL_OPTIONAL\x10\x01\x12\x12\n\x0eLABEL_REQUIRED\x10\x02\x12\x12\n\x0eLABEL_REPEATED\x10\x03\"$\n\x14OneofDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\"\x8c\x01\n\x13\x45numDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x38\n\x05value\x18\x02 \x03(\x0b\x32).google.protobuf.EnumValueDescriptorProto\x12-\n\x07options\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.EnumOptions\"l\n\x18\x45numValueDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x02 \x01(\x05\x12\x32\n\x07options\x18\x03 \x01(\x0b\x32!.google.protobuf.EnumValueOptions\"\x90\x01\n\x16ServiceDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x36\n\x06method\x18\x02 \x03(\x0b\x32&.google.protobuf.MethodDescriptorProto\x12\x30\n\x07options\x18\x03 \x01(\x0b\x32\x1f.google.protobuf.ServiceOptions\"\xc1\x01\n\x15MethodDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x12\n\ninput_type\x18\x02 \x01(\t\x12\x13\n\x0boutput_type\x18\x03 \x01(\t\x12/\n\x07options\x18\x04 \x01(\x0b\x32\x1e.google.protobuf.MethodOptions\x12\x1f\n\x10\x63lient_streaming\x18\x05 
\x01(\x08:\x05\x66\x61lse\x12\x1f\n\x10server_streaming\x18\x06 \x01(\x08:\x05\x66\x61lse\"\xcc\x04\n\x0b\x46ileOptions\x12\x14\n\x0cjava_package\x18\x01 \x01(\t\x12\x1c\n\x14java_outer_classname\x18\x08 \x01(\t\x12\"\n\x13java_multiple_files\x18\n \x01(\x08:\x05\x66\x61lse\x12,\n\x1djava_generate_equals_and_hash\x18\x14 \x01(\x08:\x05\x66\x61lse\x12%\n\x16java_string_check_utf8\x18\x1b \x01(\x08:\x05\x66\x61lse\x12\x46\n\x0coptimize_for\x18\t \x01(\x0e\x32).google.protobuf.FileOptions.OptimizeMode:\x05SPEED\x12\x12\n\ngo_package\x18\x0b \x01(\t\x12\"\n\x13\x63\x63_generic_services\x18\x10 \x01(\x08:\x05\x66\x61lse\x12$\n\x15java_generic_services\x18\x11 \x01(\x08:\x05\x66\x61lse\x12\"\n\x13py_generic_services\x18\x12 \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x17 \x01(\x08:\x05\x66\x61lse\x12\x1f\n\x10\x63\x63_enable_arenas\x18\x1f \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption\":\n\x0cOptimizeMode\x12\t\n\x05SPEED\x10\x01\x12\r\n\tCODE_SIZE\x10\x02\x12\x10\n\x0cLITE_RUNTIME\x10\x03*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xe6\x01\n\x0eMessageOptions\x12&\n\x17message_set_wire_format\x18\x01 \x01(\x08:\x05\x66\x61lse\x12.\n\x1fno_standard_descriptor_accessor\x18\x02 \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x11\n\tmap_entry\x18\x07 \x01(\x08\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xa0\x02\n\x0c\x46ieldOptions\x12:\n\x05\x63type\x18\x01 \x01(\x0e\x32#.google.protobuf.FieldOptions.CType:\x06STRING\x12\x0e\n\x06packed\x18\x02 \x01(\x08\x12\x13\n\x04lazy\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x13\n\x04weak\x18\n \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 
\x03(\x0b\x32$.google.protobuf.UninterpretedOption\"/\n\x05\x43Type\x12\n\n\x06STRING\x10\x00\x12\x08\n\x04\x43ORD\x10\x01\x12\x10\n\x0cSTRING_PIECE\x10\x02*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x8d\x01\n\x0b\x45numOptions\x12\x13\n\x0b\x61llow_alias\x18\x02 \x01(\x08\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"}\n\x10\x45numValueOptions\x12\x19\n\ndeprecated\x18\x01 \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"{\n\x0eServiceOptions\x12\x19\n\ndeprecated\x18! \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"z\n\rMethodOptions\x12\x19\n\ndeprecated\x18! \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x9e\x02\n\x13UninterpretedOption\x12;\n\x04name\x18\x02 \x03(\x0b\x32-.google.protobuf.UninterpretedOption.NamePart\x12\x18\n\x10identifier_value\x18\x03 \x01(\t\x12\x1a\n\x12positive_int_value\x18\x04 \x01(\x04\x12\x1a\n\x12negative_int_value\x18\x05 \x01(\x03\x12\x14\n\x0c\x64ouble_value\x18\x06 \x01(\x01\x12\x14\n\x0cstring_value\x18\x07 \x01(\x0c\x12\x17\n\x0f\x61ggregate_value\x18\x08 \x01(\t\x1a\x33\n\x08NamePart\x12\x11\n\tname_part\x18\x01 \x02(\t\x12\x14\n\x0cis_extension\x18\x02 \x02(\x08\"\xb1\x01\n\x0eSourceCodeInfo\x12:\n\x08location\x18\x01 \x03(\x0b\x32(.google.protobuf.SourceCodeInfo.Location\x1a\x63\n\x08Location\x12\x10\n\x04path\x18\x01 \x03(\x05\x42\x02\x10\x01\x12\x10\n\x04span\x18\x02 \x03(\x05\x42\x02\x10\x01\x12\x18\n\x10leading_comments\x18\x03 \x01(\t\x12\x19\n\x11trailing_comments\x18\x04 
\x01(\tB)\n\x13\x63om.google.protobufB\x10\x44\x65scriptorProtosH\x01')
-
-if _descriptor._USE_C_DESCRIPTORS == False:
-  _FIELDDESCRIPTORPROTO_TYPE = _descriptor.EnumDescriptor(
-    name='Type',
-    full_name='google.protobuf.FieldDescriptorProto.Type',
-    filename=None,
-    file=DESCRIPTOR,
-    create_key=_descriptor._internal_create_key,
-    values=[
-      _descriptor.EnumValueDescriptor(
-        name='TYPE_DOUBLE', index=0, number=1,
-        serialized_options=None,
-        type=None,
-        create_key=_descriptor._internal_create_key),
-      _descriptor.EnumValueDescriptor(
-        name='TYPE_FLOAT', index=1, number=2,
-        serialized_options=None,
-        type=None,
-        create_key=_descriptor._internal_create_key),
-      _descriptor.EnumValueDescriptor(
-        name='TYPE_INT64', index=2, number=3,
-        serialized_options=None,
-        type=None,
-        create_key=_descriptor._internal_create_key),
-      _descriptor.EnumValueDescriptor(
-        name='TYPE_UINT64', index=3, number=4,
-        serialized_options=None,
-        type=None,
-        create_key=_descriptor._internal_create_key),
-      _descriptor.EnumValueDescriptor(
-        name='TYPE_INT32', index=4, number=5,
-        serialized_options=None,
-        type=None,
-        create_key=_descriptor._internal_create_key),
-      _descriptor.EnumValueDescriptor(
-        name='TYPE_FIXED64', index=5, number=6,
-        serialized_options=None,
-        type=None,
-        create_key=_descriptor._internal_create_key),
-      _descriptor.EnumValueDescriptor(
-        name='TYPE_FIXED32', index=6, number=7,
-        serialized_options=None,
-        type=None,
-        create_key=_descriptor._internal_create_key),
-      _descriptor.EnumValueDescriptor(
-        name='TYPE_BOOL', index=7, number=8,
-        serialized_options=None,
-        type=None,
-        create_key=_descriptor._internal_create_key),
-      _descriptor.EnumValueDescriptor(
-        name='TYPE_STRING', index=8, number=9,
-        serialized_options=None,
-        type=None,
-        create_key=_descriptor._internal_create_key),
-      _descriptor.EnumValueDescriptor(
-        name='TYPE_GROUP', index=9, number=10,
-        serialized_options=None,
-        type=None,
-        create_key=_descriptor._internal_create_key),
-      _descriptor.EnumValueDescriptor(
-        name='TYPE_MESSAGE', index=10, number=11,
-        serialized_options=None,
-        type=None,
-        create_key=_descriptor._internal_create_key),
-      _descriptor.EnumValueDescriptor(
-        name='TYPE_BYTES', index=11, number=12,
-        serialized_options=None,
-        type=None,
-        create_key=_descriptor._internal_create_key),
-      _descriptor.EnumValueDescriptor(
-        name='TYPE_UINT32', index=12, number=13,
-        serialized_options=None,
-        type=None,
-        create_key=_descriptor._internal_create_key),
-      _descriptor.EnumValueDescriptor(
-        name='TYPE_ENUM', index=13, number=14,
-        serialized_options=None,
-        type=None,
-        create_key=_descriptor._internal_create_key),
-      _descriptor.EnumValueDescriptor(
-        name='TYPE_SFIXED32', index=14, number=15,
-        serialized_options=None,
-        type=None,
-        create_key=_descriptor._internal_create_key),
-      _descriptor.EnumValueDescriptor(
-        name='TYPE_SFIXED64', index=15, number=16,
-        serialized_options=None,
-        type=None,
-        create_key=_descriptor._internal_create_key),
-      _descriptor.EnumValueDescriptor(
-        name='TYPE_SINT32', index=16, number=17,
-        serialized_options=None,
-        type=None,
-        create_key=_descriptor._internal_create_key),
-      _descriptor.EnumValueDescriptor(
-        name='TYPE_SINT64', index=17, number=18,
-        serialized_options=None,
-        type=None,
-        create_key=_descriptor._internal_create_key),
-    ],
-    containing_type=None,
-    serialized_options=None,
-  )
-  _sym_db.RegisterEnumDescriptor(_FIELDDESCRIPTORPROTO_TYPE)
-
-  _FIELDDESCRIPTORPROTO_LABEL = _descriptor.EnumDescriptor(
-    name='Label',
-    full_name='google.protobuf.FieldDescriptorProto.Label',
-    filename=None,
-    file=DESCRIPTOR,
-    create_key=_descriptor._internal_create_key,
-    values=[
-      _descriptor.EnumValueDescriptor(
-        name='LABEL_OPTIONAL', index=0, number=1,
-        serialized_options=None,
-        type=None,
-        create_key=_descriptor._internal_create_key),
-      _descriptor.EnumValueDescriptor(
-        name='LABEL_REQUIRED', index=1, number=2,
-        serialized_options=None,
-        type=None,
-        create_key=_descriptor._internal_create_key),
-      _descriptor.EnumValueDescriptor(
-        name='LABEL_REPEATED', index=2, number=3,
-        serialized_options=None,
-        type=None,
-        create_key=_descriptor._internal_create_key),
-    ],
-    containing_type=None,
-    serialized_options=None,
-  )
-  _sym_db.RegisterEnumDescriptor(_FIELDDESCRIPTORPROTO_LABEL)
-
-  _FILEOPTIONS_OPTIMIZEMODE = _descriptor.EnumDescriptor(
-    name='OptimizeMode',
-    full_name='google.protobuf.FileOptions.OptimizeMode',
-    filename=None,
-    file=DESCRIPTOR,
-    create_key=_descriptor._internal_create_key,
-    values=[
-      _descriptor.EnumValueDescriptor(
-        name='SPEED', index=0, number=1,
-        serialized_options=None,
-        type=None,
-        create_key=_descriptor._internal_create_key),
-      _descriptor.EnumValueDescriptor(
-        name='CODE_SIZE', index=1, number=2,
-        serialized_options=None,
-        type=None,
-        create_key=_descriptor._internal_create_key),
-      _descriptor.EnumValueDescriptor(
-        name='LITE_RUNTIME', index=2, number=3,
-        serialized_options=None,
-        type=None,
-        create_key=_descriptor._internal_create_key),
-    ],
-    containing_type=None,
-    serialized_options=None,
-  )
-  _sym_db.RegisterEnumDescriptor(_FILEOPTIONS_OPTIMIZEMODE)
-
-  _FIELDOPTIONS_CTYPE = _descriptor.EnumDescriptor(
-    name='CType',
-    full_name='google.protobuf.FieldOptions.CType',
-    filename=None,
-    file=DESCRIPTOR,
-    create_key=_descriptor._internal_create_key,
-    values=[
-      _descriptor.EnumValueDescriptor(
-        name='STRING', index=0, number=0,
-        serialized_options=None,
-        type=None,
-        create_key=_descriptor._internal_create_key),
-      _descriptor.EnumValueDescriptor(
-        name='CORD', index=1, number=1,
-        serialized_options=None,
-        type=None,
-        create_key=_descriptor._internal_create_key),
-      _descriptor.EnumValueDescriptor(
-        name='STRING_PIECE', index=2, number=2,
-        serialized_options=None,
-        type=None,
-        create_key=_descriptor._internal_create_key),
-    ],
-    containing_type=None,
-    serialized_options=None,
-  )
-  _sym_db.RegisterEnumDescriptor(_FIELDOPTIONS_CTYPE)
-
-
-  _FILEDESCRIPTORSET = _descriptor.Descriptor(
-    name='FileDescriptorSet',
-    full_name='google.protobuf.FileDescriptorSet',
-    filename=None,
-    file=DESCRIPTOR,
-    containing_type=None,
-    create_key=_descriptor._internal_create_key,
-    fields=[
-      _descriptor.FieldDescriptor(
-        name='file', full_name='google.protobuf.FileDescriptorSet.file', index=0,
-        number=1, type=11, cpp_type=10, label=3,
-        has_default_value=False, default_value=[],
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    ],
-    extensions=[
-    ],
-    nested_types=[],
-    enum_types=[
-    ],
-    serialized_options=None,
-    is_extendable=False,
-    syntax='proto2',
-    extension_ranges=[],
-    oneofs=[
-    ],
-  )
-
-
-  _FILEDESCRIPTORPROTO = _descriptor.Descriptor(
-    name='FileDescriptorProto',
-    full_name='google.protobuf.FileDescriptorProto',
-    filename=None,
-    file=DESCRIPTOR,
-    containing_type=None,
-    create_key=_descriptor._internal_create_key,
-    fields=[
-      _descriptor.FieldDescriptor(
-        name='name', full_name='google.protobuf.FileDescriptorProto.name', index=0,
-        number=1, type=9, cpp_type=9, label=1,
-        has_default_value=False, default_value=b"".decode('utf-8'),
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='package', full_name='google.protobuf.FileDescriptorProto.package', index=1,
-        number=2, type=9, cpp_type=9, label=1,
-        has_default_value=False, default_value=b"".decode('utf-8'),
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='dependency', full_name='google.protobuf.FileDescriptorProto.dependency', index=2,
-        number=3, type=9, cpp_type=9, label=3,
-        has_default_value=False, default_value=[],
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='public_dependency', full_name='google.protobuf.FileDescriptorProto.public_dependency', index=3,
-        number=10, type=5, cpp_type=1, label=3,
-        has_default_value=False, default_value=[],
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='weak_dependency', full_name='google.protobuf.FileDescriptorProto.weak_dependency', index=4,
-        number=11, type=5, cpp_type=1, label=3,
-        has_default_value=False, default_value=[],
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='message_type', full_name='google.protobuf.FileDescriptorProto.message_type', index=5,
-        number=4, type=11, cpp_type=10, label=3,
-        has_default_value=False, default_value=[],
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='enum_type', full_name='google.protobuf.FileDescriptorProto.enum_type', index=6,
-        number=5, type=11, cpp_type=10, label=3,
-        has_default_value=False, default_value=[],
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='service', full_name='google.protobuf.FileDescriptorProto.service', index=7,
-        number=6, type=11, cpp_type=10, label=3,
-        has_default_value=False, default_value=[],
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='extension', full_name='google.protobuf.FileDescriptorProto.extension', index=8,
-        number=7, type=11, cpp_type=10, label=3,
-        has_default_value=False, default_value=[],
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='options', full_name='google.protobuf.FileDescriptorProto.options', index=9,
-        number=8, type=11, cpp_type=10, label=1,
-        has_default_value=False, default_value=None,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='source_code_info', full_name='google.protobuf.FileDescriptorProto.source_code_info', index=10,
-        number=9, type=11, cpp_type=10, label=1,
-        has_default_value=False, default_value=None,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='syntax', full_name='google.protobuf.FileDescriptorProto.syntax', index=11,
-        number=12, type=9, cpp_type=9, label=1,
-        has_default_value=False, default_value=b"".decode('utf-8'),
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    ],
-    extensions=[
-    ],
-    nested_types=[],
-    enum_types=[
-    ],
-    serialized_options=None,
-    is_extendable=False,
-    syntax='proto2',
-    extension_ranges=[],
-    oneofs=[
-    ],
-  )
-
-
-  _DESCRIPTORPROTO_EXTENSIONRANGE = _descriptor.Descriptor(
-    name='ExtensionRange',
-    full_name='google.protobuf.DescriptorProto.ExtensionRange',
-    filename=None,
-    file=DESCRIPTOR,
-    containing_type=None,
-    create_key=_descriptor._internal_create_key,
-    fields=[
-      _descriptor.FieldDescriptor(
-        name='start', full_name='google.protobuf.DescriptorProto.ExtensionRange.start', index=0,
-        number=1, type=5, cpp_type=1, label=1,
-        has_default_value=False, default_value=0,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='end', full_name='google.protobuf.DescriptorProto.ExtensionRange.end', index=1,
-        number=2, type=5, cpp_type=1, label=1,
-        has_default_value=False, default_value=0,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    ],
-    extensions=[
-    ],
-    nested_types=[],
-    enum_types=[
-    ],
-    serialized_options=None,
-    is_extendable=False,
-    syntax='proto2',
-    extension_ranges=[],
-    oneofs=[
-    ],
-  )
-
-  _DESCRIPTORPROTO = _descriptor.Descriptor(
-    name='DescriptorProto',
-    full_name='google.protobuf.DescriptorProto',
-    filename=None,
-    file=DESCRIPTOR,
-    containing_type=None,
-    create_key=_descriptor._internal_create_key,
-    fields=[
-      _descriptor.FieldDescriptor(
-        name='name', full_name='google.protobuf.DescriptorProto.name', index=0,
-        number=1, type=9, cpp_type=9, label=1,
-        has_default_value=False, default_value=b"".decode('utf-8'),
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='field', full_name='google.protobuf.DescriptorProto.field', index=1,
-        number=2, type=11, cpp_type=10, label=3,
-        has_default_value=False, default_value=[],
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='extension', full_name='google.protobuf.DescriptorProto.extension', index=2,
-        number=6, type=11, cpp_type=10, label=3,
-        has_default_value=False, default_value=[],
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='nested_type', full_name='google.protobuf.DescriptorProto.nested_type', index=3,
-        number=3, type=11, cpp_type=10, label=3,
-        has_default_value=False, default_value=[],
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='enum_type', full_name='google.protobuf.DescriptorProto.enum_type', index=4,
-        number=4, type=11, cpp_type=10, label=3,
-        has_default_value=False, default_value=[],
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='extension_range', full_name='google.protobuf.DescriptorProto.extension_range', index=5,
-        number=5, type=11, cpp_type=10, label=3,
-        has_default_value=False, default_value=[],
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='oneof_decl', full_name='google.protobuf.DescriptorProto.oneof_decl', index=6,
-        number=8, type=11, cpp_type=10, label=3,
-        has_default_value=False, default_value=[],
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='options', full_name='google.protobuf.DescriptorProto.options', index=7,
-        number=7, type=11, cpp_type=10, label=1,
-        has_default_value=False, default_value=None,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    ],
-    extensions=[
-    ],
-    nested_types=[_DESCRIPTORPROTO_EXTENSIONRANGE, ],
-    enum_types=[
-    ],
-    serialized_options=None,
-    is_extendable=False,
-    syntax='proto2',
-    extension_ranges=[],
-    oneofs=[
-    ],
-  )
-
-
-  _FIELDDESCRIPTORPROTO = _descriptor.Descriptor(
-    name='FieldDescriptorProto',
-    full_name='google.protobuf.FieldDescriptorProto',
-    filename=None,
-    file=DESCRIPTOR,
-    containing_type=None,
-    create_key=_descriptor._internal_create_key,
-    fields=[
-      _descriptor.FieldDescriptor(
-        name='name', full_name='google.protobuf.FieldDescriptorProto.name', index=0,
-        number=1, type=9, cpp_type=9, label=1,
-        has_default_value=False, default_value=b"".decode('utf-8'),
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='number', full_name='google.protobuf.FieldDescriptorProto.number', index=1,
-        number=3, type=5, cpp_type=1, label=1,
-        has_default_value=False, default_value=0,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='label', full_name='google.protobuf.FieldDescriptorProto.label', index=2,
-        number=4, type=14, cpp_type=8, label=1,
-        has_default_value=False, default_value=1,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='type', full_name='google.protobuf.FieldDescriptorProto.type', index=3,
-        number=5, type=14, cpp_type=8, label=1,
-        has_default_value=False, default_value=1,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='type_name', full_name='google.protobuf.FieldDescriptorProto.type_name', index=4,
-        number=6, type=9, cpp_type=9, label=1,
-        has_default_value=False, default_value=b"".decode('utf-8'),
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='extendee', full_name='google.protobuf.FieldDescriptorProto.extendee', index=5,
-        number=2, type=9, cpp_type=9, label=1,
-        has_default_value=False, default_value=b"".decode('utf-8'),
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='default_value', full_name='google.protobuf.FieldDescriptorProto.default_value', index=6,
-        number=7, type=9, cpp_type=9, label=1,
-        has_default_value=False, default_value=b"".decode('utf-8'),
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='oneof_index', full_name='google.protobuf.FieldDescriptorProto.oneof_index', index=7,
-        number=9, type=5, cpp_type=1, label=1,
-        has_default_value=False, default_value=0,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='options', full_name='google.protobuf.FieldDescriptorProto.options', index=8,
-        number=8, type=11, cpp_type=10, label=1,
-        has_default_value=False, default_value=None,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    ],
-    extensions=[
-    ],
-    nested_types=[],
-    enum_types=[
-      _FIELDDESCRIPTORPROTO_TYPE,
-      _FIELDDESCRIPTORPROTO_LABEL,
-    ],
-    serialized_options=None,
-    is_extendable=False,
-    syntax='proto2',
-    extension_ranges=[],
-    oneofs=[
-    ],
-  )
-
-
-  _ONEOFDESCRIPTORPROTO = _descriptor.Descriptor(
-    name='OneofDescriptorProto',
-    full_name='google.protobuf.OneofDescriptorProto',
-    filename=None,
-    file=DESCRIPTOR,
-    containing_type=None,
-    create_key=_descriptor._internal_create_key,
-    fields=[
-      _descriptor.FieldDescriptor(
-        name='name', full_name='google.protobuf.OneofDescriptorProto.name', index=0,
-        number=1, type=9, cpp_type=9, label=1,
-        has_default_value=False, default_value=b"".decode('utf-8'),
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    ],
-    extensions=[
-    ],
-    nested_types=[],
-    enum_types=[
-    ],
-    serialized_options=None,
-    is_extendable=False,
-    syntax='proto2',
-    extension_ranges=[],
-    oneofs=[
-    ],
-  )
-
-
-  _ENUMDESCRIPTORPROTO = _descriptor.Descriptor(
-    name='EnumDescriptorProto',
-    full_name='google.protobuf.EnumDescriptorProto',
-    filename=None,
-    file=DESCRIPTOR,
-    containing_type=None,
-    create_key=_descriptor._internal_create_key,
-    fields=[
-      _descriptor.FieldDescriptor(
-        name='name', full_name='google.protobuf.EnumDescriptorProto.name', index=0,
-        number=1, type=9, cpp_type=9, label=1,
-        has_default_value=False, default_value=b"".decode('utf-8'),
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='value', full_name='google.protobuf.EnumDescriptorProto.value', index=1,
-        number=2, type=11, cpp_type=10, label=3,
-        has_default_value=False, default_value=[],
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='options', full_name='google.protobuf.EnumDescriptorProto.options', index=2,
-        number=3, type=11, cpp_type=10, label=1,
-        has_default_value=False, default_value=None,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    ],
-    extensions=[
-    ],
-    nested_types=[],
-    enum_types=[
-    ],
-    serialized_options=None,
-    is_extendable=False,
-    syntax='proto2',
-    extension_ranges=[],
-    oneofs=[
-    ],
-  )
-
-
-  _ENUMVALUEDESCRIPTORPROTO = _descriptor.Descriptor(
-    name='EnumValueDescriptorProto',
-    full_name='google.protobuf.EnumValueDescriptorProto',
-    filename=None,
-    file=DESCRIPTOR,
-    containing_type=None,
-    create_key=_descriptor._internal_create_key,
-    fields=[
-      _descriptor.FieldDescriptor(
-        name='name', full_name='google.protobuf.EnumValueDescriptorProto.name', index=0,
-        number=1, type=9, cpp_type=9, label=1,
-        has_default_value=False, default_value=b"".decode('utf-8'),
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='number', full_name='google.protobuf.EnumValueDescriptorProto.number', index=1,
-        number=2, type=5, cpp_type=1, label=1,
-        has_default_value=False, default_value=0,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='options', full_name='google.protobuf.EnumValueDescriptorProto.options', index=2,
-        number=3, type=11, cpp_type=10, label=1,
-        has_default_value=False, default_value=None,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    ],
-    extensions=[
-    ],
-    nested_types=[],
-    enum_types=[
-    ],
-    serialized_options=None,
-    is_extendable=False,
-    syntax='proto2',
-    extension_ranges=[],
-    oneofs=[
-    ],
-  )
-
-
-  _SERVICEDESCRIPTORPROTO = _descriptor.Descriptor(
-    name='ServiceDescriptorProto',
-    full_name='google.protobuf.ServiceDescriptorProto',
-    filename=None,
-    file=DESCRIPTOR,
-    containing_type=None,
-    create_key=_descriptor._internal_create_key,
-    fields=[
-      _descriptor.FieldDescriptor(
-        name='name', full_name='google.protobuf.ServiceDescriptorProto.name', index=0,
-        number=1, type=9, cpp_type=9, label=1,
-        has_default_value=False, default_value=b"".decode('utf-8'),
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='method', full_name='google.protobuf.ServiceDescriptorProto.method', index=1,
-        number=2, type=11, cpp_type=10, label=3,
-        has_default_value=False, default_value=[],
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='options', full_name='google.protobuf.ServiceDescriptorProto.options', index=2,
-        number=3, type=11, cpp_type=10, label=1,
-        has_default_value=False, default_value=None,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    ],
-    extensions=[
-    ],
-    nested_types=[],
-    enum_types=[
-    ],
-    serialized_options=None,
-    is_extendable=False,
-    syntax='proto2',
-    extension_ranges=[],
-    oneofs=[
-    ],
-  )
-
-
-  _METHODDESCRIPTORPROTO = _descriptor.Descriptor(
-    name='MethodDescriptorProto',
-    full_name='google.protobuf.MethodDescriptorProto',
-    filename=None,
-    file=DESCRIPTOR,
-    containing_type=None,
-    create_key=_descriptor._internal_create_key,
-    fields=[
-      _descriptor.FieldDescriptor(
-        name='name', full_name='google.protobuf.MethodDescriptorProto.name', index=0,
-        number=1, type=9, cpp_type=9, label=1,
-        has_default_value=False, default_value=b"".decode('utf-8'),
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='input_type', full_name='google.protobuf.MethodDescriptorProto.input_type', index=1,
-        number=2, type=9, cpp_type=9, label=1,
-        has_default_value=False, default_value=b"".decode('utf-8'),
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='output_type', full_name='google.protobuf.MethodDescriptorProto.output_type', index=2,
-        number=3, type=9, cpp_type=9, label=1,
-        has_default_value=False, default_value=b"".decode('utf-8'),
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='options', full_name='google.protobuf.MethodDescriptorProto.options', index=3,
-        number=4, type=11, cpp_type=10, label=1,
-        has_default_value=False, default_value=None,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='client_streaming', full_name='google.protobuf.MethodDescriptorProto.client_streaming', index=4,
-        number=5, type=8, cpp_type=7, label=1,
-        has_default_value=True, default_value=False,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='server_streaming', full_name='google.protobuf.MethodDescriptorProto.server_streaming', index=5,
-        number=6, type=8, cpp_type=7, label=1,
-        has_default_value=True, default_value=False,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    ],
-    extensions=[
-    ],
-    nested_types=[],
-    enum_types=[
-    ],
-    serialized_options=None,
-    is_extendable=False,
-    syntax='proto2',
-    extension_ranges=[],
-    oneofs=[
-    ],
-  )
-
-
-  _FILEOPTIONS = _descriptor.Descriptor(
-    name='FileOptions',
-    full_name='google.protobuf.FileOptions',
-    filename=None,
-    file=DESCRIPTOR,
-    containing_type=None,
-    create_key=_descriptor._internal_create_key,
-    fields=[
-      _descriptor.FieldDescriptor(
-        name='java_package', full_name='google.protobuf.FileOptions.java_package', index=0,
-        number=1, type=9, cpp_type=9, label=1,
-        has_default_value=False, default_value=b"".decode('utf-8'),
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='java_outer_classname', full_name='google.protobuf.FileOptions.java_outer_classname', index=1,
-        number=8, type=9, cpp_type=9, label=1,
-        has_default_value=False, default_value=b"".decode('utf-8'),
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='java_multiple_files', full_name='google.protobuf.FileOptions.java_multiple_files', index=2,
-        number=10, type=8, cpp_type=7, label=1,
-        has_default_value=True, default_value=False,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='java_generate_equals_and_hash', full_name='google.protobuf.FileOptions.java_generate_equals_and_hash', index=3,
-        number=20, type=8, cpp_type=7, label=1,
-        has_default_value=True, default_value=False,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='java_string_check_utf8', full_name='google.protobuf.FileOptions.java_string_check_utf8', index=4,
-        number=27, type=8, cpp_type=7, label=1,
-        has_default_value=True, default_value=False,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='optimize_for', full_name='google.protobuf.FileOptions.optimize_for', index=5,
-        number=9, type=14, cpp_type=8, label=1,
-        has_default_value=True, default_value=1,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='go_package', full_name='google.protobuf.FileOptions.go_package', index=6,
-        number=11, type=9, cpp_type=9, label=1,
-        has_default_value=False, default_value=b"".decode('utf-8'),
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='cc_generic_services', full_name='google.protobuf.FileOptions.cc_generic_services', index=7,
-        number=16, type=8, cpp_type=7, label=1,
-        has_default_value=True, default_value=False,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='java_generic_services', full_name='google.protobuf.FileOptions.java_generic_services', index=8,
-        number=17, type=8, cpp_type=7, label=1,
-        has_default_value=True, default_value=False,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='py_generic_services', full_name='google.protobuf.FileOptions.py_generic_services', index=9,
-        number=18, type=8, cpp_type=7, label=1,
-        has_default_value=True, default_value=False,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='deprecated', full_name='google.protobuf.FileOptions.deprecated', index=10,
-        number=23, type=8, cpp_type=7, label=1,
-        has_default_value=True, default_value=False,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='cc_enable_arenas', full_name='google.protobuf.FileOptions.cc_enable_arenas', index=11,
-        number=31, type=8, cpp_type=7, label=1,
-        has_default_value=True, default_value=False,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='uninterpreted_option', full_name='google.protobuf.FileOptions.uninterpreted_option', index=12,
-        number=999, type=11, cpp_type=10, label=3,
-        has_default_value=False, default_value=[],
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    ],
-    extensions=[
-    ],
-    nested_types=[],
-    enum_types=[
-      _FILEOPTIONS_OPTIMIZEMODE,
-    ],
-    serialized_options=None,
-    is_extendable=True,
-    syntax='proto2',
-    extension_ranges=[(1000, 536870912), ],
-    oneofs=[
-    ],
-  )
-
-
-  _MESSAGEOPTIONS = _descriptor.Descriptor(
-    name='MessageOptions',
-    full_name='google.protobuf.MessageOptions',
-    filename=None,
-    file=DESCRIPTOR,
-    containing_type=None,
-    create_key=_descriptor._internal_create_key,
-    fields=[
-      _descriptor.FieldDescriptor(
-        name='message_set_wire_format', full_name='google.protobuf.MessageOptions.message_set_wire_format', index=0,
-        number=1, type=8, cpp_type=7, label=1,
-        has_default_value=True, default_value=False,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='no_standard_descriptor_accessor', full_name='google.protobuf.MessageOptions.no_standard_descriptor_accessor', index=1,
-        number=2, type=8, cpp_type=7, label=1,
-        has_default_value=True, default_value=False,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='deprecated', full_name='google.protobuf.MessageOptions.deprecated', index=2,
-        number=3, type=8, cpp_type=7, label=1,
-        has_default_value=True, default_value=False,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='map_entry', full_name='google.protobuf.MessageOptions.map_entry', index=3,
-        number=7, type=8, cpp_type=7, label=1,
-        has_default_value=False, default_value=False,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='uninterpreted_option', full_name='google.protobuf.MessageOptions.uninterpreted_option', index=4,
-        number=999, type=11, cpp_type=10, label=3,
-        has_default_value=False, default_value=[],
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    ],
-    extensions=[
-    ],
-    nested_types=[],
-    enum_types=[
-    ],
-    serialized_options=None,
-    is_extendable=True,
-    syntax='proto2',
-    extension_ranges=[(1000, 536870912), ],
-    oneofs=[
-    ],
-  )
-
-
-  _FIELDOPTIONS = _descriptor.Descriptor(
-    name='FieldOptions',
-    full_name='google.protobuf.FieldOptions',
-    filename=None,
-    file=DESCRIPTOR,
-    containing_type=None,
-    create_key=_descriptor._internal_create_key,
-    fields=[
-      _descriptor.FieldDescriptor(
-        name='ctype', full_name='google.protobuf.FieldOptions.ctype', index=0,
-        number=1, type=14, cpp_type=8, label=1,
-        has_default_value=True, default_value=0,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='packed', full_name='google.protobuf.FieldOptions.packed', index=1,
-        number=2, type=8, cpp_type=7, label=1,
-        has_default_value=False, default_value=False,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='lazy', full_name='google.protobuf.FieldOptions.lazy', index=2,
-        number=5, type=8, cpp_type=7, label=1,
-        has_default_value=True, default_value=False,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='deprecated', full_name='google.protobuf.FieldOptions.deprecated', index=3,
-        number=3, type=8, cpp_type=7, label=1,
-        has_default_value=True, default_value=False,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='weak', full_name='google.protobuf.FieldOptions.weak', index=4,
-        number=10, type=8, cpp_type=7, label=1,
-        has_default_value=True, default_value=False,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='uninterpreted_option', full_name='google.protobuf.FieldOptions.uninterpreted_option', index=5,
-        number=999, type=11, cpp_type=10, label=3,
-        has_default_value=False, default_value=[],
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    ],
-    extensions=[
-    ],
-    nested_types=[],
-    enum_types=[
-      _FIELDOPTIONS_CTYPE,
-    ],
-    serialized_options=None,
-    is_extendable=True,
-    syntax='proto2',
-    extension_ranges=[(1000, 536870912), ],
-    oneofs=[
-    ],
-  )
-
-
-  _ENUMOPTIONS = _descriptor.Descriptor(
-    name='EnumOptions',
-    full_name='google.protobuf.EnumOptions',
-    filename=None,
-    file=DESCRIPTOR,
-    containing_type=None,
-    create_key=_descriptor._internal_create_key,
-    fields=[
-      _descriptor.FieldDescriptor(
-        name='allow_alias', full_name='google.protobuf.EnumOptions.allow_alias', index=0,
-        number=2, type=8, cpp_type=7, label=1,
-        has_default_value=False, default_value=False,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='deprecated', full_name='google.protobuf.EnumOptions.deprecated', index=1,
-        number=3, type=8, cpp_type=7, label=1,
-        has_default_value=True, default_value=False,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='uninterpreted_option', full_name='google.protobuf.EnumOptions.uninterpreted_option', index=2,
-        number=999, type=11, cpp_type=10, label=3,
-        has_default_value=False, default_value=[],
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    ],
-    extensions=[
-    ],
-    nested_types=[],
-    enum_types=[
-    ],
-    serialized_options=None,
-    is_extendable=True,
-    syntax='proto2',
-    extension_ranges=[(1000, 536870912), ],
-    oneofs=[
-    ],
-  )
-
-
-  _ENUMVALUEOPTIONS = _descriptor.Descriptor(
-    name='EnumValueOptions',
-    full_name='google.protobuf.EnumValueOptions',
-    filename=None,
-    file=DESCRIPTOR,
-    containing_type=None,
-    create_key=_descriptor._internal_create_key,
-    fields=[
-      _descriptor.FieldDescriptor(
-        name='deprecated', full_name='google.protobuf.EnumValueOptions.deprecated', index=0,
-        number=1, type=8, cpp_type=7, label=1,
-        has_default_value=True, default_value=False,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='uninterpreted_option', full_name='google.protobuf.EnumValueOptions.uninterpreted_option', index=1,
-        number=999, type=11, cpp_type=10, label=3,
-        has_default_value=False, default_value=[],
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    ],
-    extensions=[
-    ],
-    nested_types=[],
-    enum_types=[
-    ],
-    serialized_options=None,
-    is_extendable=True,
-    syntax='proto2',
-    extension_ranges=[(1000, 536870912), ],
-    oneofs=[
-    ],
-  )
-
-
-  _SERVICEOPTIONS = _descriptor.Descriptor(
-    name='ServiceOptions',
-    full_name='google.protobuf.ServiceOptions',
-    filename=None,
-    file=DESCRIPTOR,
-    containing_type=None,
-    create_key=_descriptor._internal_create_key,
-    fields=[
-      _descriptor.FieldDescriptor(
-        name='deprecated', full_name='google.protobuf.ServiceOptions.deprecated', index=0,
-        number=33, type=8, cpp_type=7, label=1,
-        has_default_value=True, default_value=False,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='uninterpreted_option', full_name='google.protobuf.ServiceOptions.uninterpreted_option', index=1,
-        number=999, type=11, cpp_type=10, label=3,
-        has_default_value=False, default_value=[],
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    ],
-    extensions=[
-    ],
-    nested_types=[],
-    enum_types=[
-    ],
-    serialized_options=None,
-    is_extendable=True,
-    syntax='proto2',
-    extension_ranges=[(1000, 536870912), ],
-    oneofs=[
-    ],
-  )
-
-
-  _METHODOPTIONS = _descriptor.Descriptor(
-    name='MethodOptions',
-    full_name='google.protobuf.MethodOptions',
-    filename=None,
-    file=DESCRIPTOR,
-    containing_type=None,
-    create_key=_descriptor._internal_create_key,
-    fields=[
-      _descriptor.FieldDescriptor(
-        name='deprecated', full_name='google.protobuf.MethodOptions.deprecated', index=0,
-        number=33, type=8, cpp_type=7, label=1,
-        has_default_value=True, default_value=False,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='uninterpreted_option', full_name='google.protobuf.MethodOptions.uninterpreted_option', index=1,
-        number=999, type=11, cpp_type=10, label=3,
-        has_default_value=False, default_value=[],
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    ],
-    extensions=[
-    ],
-    nested_types=[],
-    enum_types=[
-    ],
-    serialized_options=None,
-    is_extendable=True,
-    syntax='proto2',
-    extension_ranges=[(1000, 536870912), ],
-    oneofs=[
-    ],
-  )
-
-
-  _UNINTERPRETEDOPTION_NAMEPART = _descriptor.Descriptor(
-    name='NamePart',
-    full_name='google.protobuf.UninterpretedOption.NamePart',
-    filename=None,
-    file=DESCRIPTOR,
-    containing_type=None,
-    create_key=_descriptor._internal_create_key,
-    fields=[
-      _descriptor.FieldDescriptor(
-        name='name_part', full_name='google.protobuf.UninterpretedOption.NamePart.name_part', index=0,
-        number=1, type=9, cpp_type=9, label=2,
-        has_default_value=False, default_value=b"".decode('utf-8'),
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='is_extension', full_name='google.protobuf.UninterpretedOption.NamePart.is_extension', index=1,
-        number=2, type=8, cpp_type=7, label=2,
-        has_default_value=False, default_value=False,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    ],
-    extensions=[
-    ],
-    nested_types=[],
-    enum_types=[
-    ],
-    serialized_options=None,
-    is_extendable=False,
-    syntax='proto2',
-    extension_ranges=[],
-    oneofs=[
-    ],
-  )
-
-  _UNINTERPRETEDOPTION = _descriptor.Descriptor(
-    name='UninterpretedOption',
-    full_name='google.protobuf.UninterpretedOption',
-    filename=None,
-    file=DESCRIPTOR,
-    containing_type=None,
-    create_key=_descriptor._internal_create_key,
-    fields=[
-      _descriptor.FieldDescriptor(
-        name='name', full_name='google.protobuf.UninterpretedOption.name', index=0,
-        number=2, type=11, cpp_type=10, label=3,
-        has_default_value=False, default_value=[],
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='identifier_value', full_name='google.protobuf.UninterpretedOption.identifier_value', index=1,
-        number=3, type=9, cpp_type=9, label=1,
-        has_default_value=False, default_value=b"".decode('utf-8'),
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='positive_int_value', full_name='google.protobuf.UninterpretedOption.positive_int_value', index=2,
-        number=4, type=4, cpp_type=4, label=1,
-        has_default_value=False, default_value=0,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='negative_int_value', full_name='google.protobuf.UninterpretedOption.negative_int_value', index=3,
-        number=5, type=3, cpp_type=2, label=1,
-        has_default_value=False, default_value=0,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='double_value', full_name='google.protobuf.UninterpretedOption.double_value', index=4,
-        number=6, type=1, cpp_type=5, label=1,
-        has_default_value=False, default_value=float(0),
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='string_value', full_name='google.protobuf.UninterpretedOption.string_value', index=5,
-        number=7, type=12, cpp_type=9, label=1,
-        has_default_value=False, default_value=b"",
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='aggregate_value', full_name='google.protobuf.UninterpretedOption.aggregate_value', index=6,
-        number=8, type=9, cpp_type=9, label=1,
-        has_default_value=False, default_value=b"".decode('utf-8'),
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    ],
-    extensions=[
-    ],
-    nested_types=[_UNINTERPRETEDOPTION_NAMEPART, ],
-    enum_types=[
-    ],
-    serialized_options=None,
-    is_extendable=False,
-    syntax='proto2',
-    extension_ranges=[],
-    oneofs=[
-    ],
-  )
-
-
-  _SOURCECODEINFO_LOCATION = _descriptor.Descriptor(
-    name='Location',
-    full_name='google.protobuf.SourceCodeInfo.Location',
-    filename=None,
-    file=DESCRIPTOR,
-    containing_type=None,
-    create_key=_descriptor._internal_create_key,
-    fields=[
-      _descriptor.FieldDescriptor(
-        name='path', full_name='google.protobuf.SourceCodeInfo.Location.path', index=0,
-        number=1, type=5, cpp_type=1, label=3,
-        has_default_value=False, default_value=[],
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='span', full_name='google.protobuf.SourceCodeInfo.Location.span', index=1,
-        number=2, type=5, cpp_type=1, label=3,
-        has_default_value=False, default_value=[],
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='leading_comments', full_name='google.protobuf.SourceCodeInfo.Location.leading_comments', index=2,
-        number=3, type=9, cpp_type=9, label=1,
-        has_default_value=False, default_value=b"".decode('utf-8'),
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='trailing_comments', full_name='google.protobuf.SourceCodeInfo.Location.trailing_comments', index=3,
-        number=4, type=9, cpp_type=9, label=1,
-        has_default_value=False, default_value=b"".decode('utf-8'),
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    ],
-    extensions=[
-    ],
-    nested_types=[],
-    enum_types=[
-    ],
-    serialized_options=None,
-    is_extendable=False,
-    syntax='proto2',
-    extension_ranges=[],
-    oneofs=[
-    ],
-  )
-
-  _SOURCECODEINFO = _descriptor.Descriptor(
-    name='SourceCodeInfo',
-    full_name='google.protobuf.SourceCodeInfo',
-    filename=None,
-    file=DESCRIPTOR,
-    containing_type=None,
-    create_key=_descriptor._internal_create_key,
-    fields=[
-      _descriptor.FieldDescriptor(
-        name='location', full_name='google.protobuf.SourceCodeInfo.location', index=0,
-        number=1, type=11, cpp_type=10, label=3,
-        has_default_value=False, default_value=[],
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    ],
-    extensions=[
-    ],
-    nested_types=[_SOURCECODEINFO_LOCATION, ],
-    enum_types=[
-    ],
-    serialized_options=None,
-    is_extendable=False,
-    syntax='proto2',
-    extension_ranges=[],
-    oneofs=[
-    ],
-  )
-
-  _FILEDESCRIPTORSET.fields_by_name['file'].message_type = _FILEDESCRIPTORPROTO
-  _FILEDESCRIPTORPROTO.fields_by_name['message_type'].message_type = _DESCRIPTORPROTO
-  _FILEDESCRIPTORPROTO.fields_by_name['enum_type'].message_type = _ENUMDESCRIPTORPROTO
-  _FILEDESCRIPTORPROTO.fields_by_name['service'].message_type = _SERVICEDESCRIPTORPROTO
-  _FILEDESCRIPTORPROTO.fields_by_name['extension'].message_type = _FIELDDESCRIPTORPROTO
-  _FILEDESCRIPTORPROTO.fields_by_name['options'].message_type = _FILEOPTIONS
-  _FILEDESCRIPTORPROTO.fields_by_name['source_code_info'].message_type = _SOURCECODEINFO
-  _DESCRIPTORPROTO_EXTENSIONRANGE.containing_type = _DESCRIPTORPROTO
-  _DESCRIPTORPROTO.fields_by_name['field'].message_type = _FIELDDESCRIPTORPROTO
-  _DESCRIPTORPROTO.fields_by_name['extension'].message_type = _FIELDDESCRIPTORPROTO
-  _DESCRIPTORPROTO.fields_by_name['nested_type'].message_type = _DESCRIPTORPROTO
-  _DESCRIPTORPROTO.fields_by_name['enum_type'].message_type = _ENUMDESCRIPTORPROTO
-  _DESCRIPTORPROTO.fields_by_name['extension_range'].message_type = _DESCRIPTORPROTO_EXTENSIONRANGE
-  _DESCRIPTORPROTO.fields_by_name['oneof_decl'].message_type = _ONEOFDESCRIPTORPROTO
-  _DESCRIPTORPROTO.fields_by_name['options'].message_type = _MESSAGEOPTIONS
-  _FIELDDESCRIPTORPROTO.fields_by_name['label'].enum_type = _FIELDDESCRIPTORPROTO_LABEL
-  _FIELDDESCRIPTORPROTO.fields_by_name['type'].enum_type = _FIELDDESCRIPTORPROTO_TYPE
-  _FIELDDESCRIPTORPROTO.fields_by_name['options'].message_type = _FIELDOPTIONS
-  _FIELDDESCRIPTORPROTO_TYPE.containing_type = _FIELDDESCRIPTORPROTO
-  _FIELDDESCRIPTORPROTO_LABEL.containing_type = _FIELDDESCRIPTORPROTO
-  _ENUMDESCRIPTORPROTO.fields_by_name['value'].message_type = _ENUMVALUEDESCRIPTORPROTO
-  _ENUMDESCRIPTORPROTO.fields_by_name['options'].message_type = _ENUMOPTIONS
-  _ENUMVALUEDESCRIPTORPROTO.fields_by_name['options'].message_type = _ENUMVALUEOPTIONS
-  _SERVICEDESCRIPTORPROTO.fields_by_name['method'].message_type = _METHODDESCRIPTORPROTO
-  _SERVICEDESCRIPTORPROTO.fields_by_name['options'].message_type = _SERVICEOPTIONS
-  _METHODDESCRIPTORPROTO.fields_by_name['options'].message_type = _METHODOPTIONS
-  _FILEOPTIONS.fields_by_name['optimize_for'].enum_type = _FILEOPTIONS_OPTIMIZEMODE
-  _FILEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
-  _FILEOPTIONS_OPTIMIZEMODE.containing_type = _FILEOPTIONS
-  _MESSAGEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
-  _FIELDOPTIONS.fields_by_name['ctype'].enum_type = _FIELDOPTIONS_CTYPE
-  _FIELDOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
-  _FIELDOPTIONS_CTYPE.containing_type = _FIELDOPTIONS
-  _ENUMOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
-  _ENUMVALUEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
-  _SERVICEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
-  _METHODOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
-  _UNINTERPRETEDOPTION_NAMEPART.containing_type = _UNINTERPRETEDOPTION
-  _UNINTERPRETEDOPTION.fields_by_name['name'].message_type = _UNINTERPRETEDOPTION_NAMEPART
-  _SOURCECODEINFO_LOCATION.containing_type = _SOURCECODEINFO
-  _SOURCECODEINFO.fields_by_name['location'].message_type = _SOURCECODEINFO_LOCATION
-  DESCRIPTOR.message_types_by_name['FileDescriptorSet'] = _FILEDESCRIPTORSET
-  DESCRIPTOR.message_types_by_name['FileDescriptorProto'] = _FILEDESCRIPTORPROTO
-  DESCRIPTOR.message_types_by_name['DescriptorProto'] = _DESCRIPTORPROTO
-  DESCRIPTOR.message_types_by_name['FieldDescriptorProto'] = _FIELDDESCRIPTORPROTO
-  DESCRIPTOR.message_types_by_name['OneofDescriptorProto'] = _ONEOFDESCRIPTORPROTO
-  DESCRIPTOR.message_types_by_name['EnumDescriptorProto'] = _ENUMDESCRIPTORPROTO
-  DESCRIPTOR.message_types_by_name['EnumValueDescriptorProto'] = _ENUMVALUEDESCRIPTORPROTO
-  DESCRIPTOR.message_types_by_name['ServiceDescriptorProto'] = _SERVICEDESCRIPTORPROTO
-  DESCRIPTOR.message_types_by_name['MethodDescriptorProto'] = _METHODDESCRIPTORPROTO
-  DESCRIPTOR.message_types_by_name['FileOptions'] = _FILEOPTIONS
-  DESCRIPTOR.message_types_by_name['MessageOptions'] = _MESSAGEOPTIONS
-  DESCRIPTOR.message_types_by_name['FieldOptions'] = _FIELDOPTIONS
-  DESCRIPTOR.message_types_by_name['EnumOptions'] = _ENUMOPTIONS
-  DESCRIPTOR.message_types_by_name['EnumValueOptions'] = _ENUMVALUEOPTIONS
-  DESCRIPTOR.message_types_by_name['ServiceOptions'] = _SERVICEOPTIONS
-  DESCRIPTOR.message_types_by_name['MethodOptions'] = _METHODOPTIONS
-  DESCRIPTOR.message_types_by_name['UninterpretedOption'] = _UNINTERPRETEDOPTION
-  DESCRIPTOR.message_types_by_name['SourceCodeInfo'] = _SOURCECODEINFO
-  _sym_db.RegisterFileDescriptor(DESCRIPTOR)
-
-else:
-  _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
-_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.descriptor_pb2', globals())
-if _descriptor._USE_C_DESCRIPTORS == False:
-
-  DESCRIPTOR._options = None
-  _FILEDESCRIPTORSET._serialized_start=53
-  _FILEDESCRIPTORSET._serialized_end=124
-  _FILEDESCRIPTORPROTO._serialized_start=127
-  _FILEDESCRIPTORPROTO._serialized_end=602
-  _DESCRIPTORPROTO._serialized_start=605
-  _DESCRIPTORPROTO._serialized_end=1089
-  _DESCRIPTORPROTO_EXTENSIONRANGE._serialized_start=1045
-  _DESCRIPTORPROTO_EXTENSIONRANGE._serialized_end=1089
-  _FIELDDESCRIPTORPROTO._serialized_start=1092
-  _FIELDDESCRIPTORPROTO._serialized_end=1773
-  _FIELDDESCRIPTORPROTO_TYPE._serialized_start=1394
-  _FIELDDESCRIPTORPROTO_TYPE._serialized_end=1704
-  _FIELDDESCRIPTORPROTO_LABEL._serialized_start=1706
-  _FIELDDESCRIPTORPROTO_LABEL._serialized_end=1773
-  _ONEOFDESCRIPTORPROTO._serialized_start=1775
-  _ONEOFDESCRIPTORPROTO._serialized_end=1811
-  _ENUMDESCRIPTORPROTO._serialized_start=1814
-  _ENUMDESCRIPTORPROTO._serialized_end=1954
-  _ENUMVALUEDESCRIPTORPROTO._serialized_start=1956
-  _ENUMVALUEDESCRIPTORPROTO._serialized_end=2064
-  _SERVICEDESCRIPTORPROTO._serialized_start=2067
-  _SERVICEDESCRIPTORPROTO._serialized_end=2211
-  _METHODDESCRIPTORPROTO._serialized_start=2214
-  _METHODDESCRIPTORPROTO._serialized_end=2407
-  _FILEOPTIONS._serialized_start=2410
-  _FILEOPTIONS._serialized_end=2998
-  _FILEOPTIONS_OPTIMIZEMODE._serialized_start=2929
-  _FILEOPTIONS_OPTIMIZEMODE._serialized_end=2987
-  _MESSAGEOPTIONS._serialized_start=3001
-  _MESSAGEOPTIONS._serialized_end=3231
-  _FIELDOPTIONS._serialized_start=3234
-  _FIELDOPTIONS._serialized_end=3522
-  _FIELDOPTIONS_CTYPE._serialized_start=3464
-  _FIELDOPTIONS_CTYPE._serialized_end=3511
-  _ENUMOPTIONS._serialized_start=3525
-  _ENUMOPTIONS._serialized_end=3666
-  _ENUMVALUEOPTIONS._serialized_start=3668
-  _ENUMVALUEOPTIONS._serialized_end=3793
-  _SERVICEOPTIONS._serialized_start=3795
-  _SERVICEOPTIONS._serialized_end=3918
-  _METHODOPTIONS._serialized_start=3920
-  _METHODOPTIONS._serialized_end=4042
-  _UNINTERPRETEDOPTION._serialized_start=4045
-  _UNINTERPRETEDOPTION._serialized_end=4331
-  _UNINTERPRETEDOPTION_NAMEPART._serialized_start=4280
-  _UNINTERPRETEDOPTION_NAMEPART._serialized_end=4331
-  _SOURCECODEINFO._serialized_start=4334
-  _SOURCECODEINFO._serialized_end=4511
-  _SOURCECODEINFO_LOCATION._serialized_start=4412
-  _SOURCECODEINFO_LOCATION._serialized_end=4511
-# @@protoc_insertion_point(module_scope)
diff --git a/src/antlion/controllers/buds_lib/dev_utils/proto/gen/nanopb_pb2.py b/src/antlion/controllers/buds_lib/dev_utils/proto/gen/nanopb_pb2.py
deleted file mode 100644
index c23077a..0000000
--- a/src/antlion/controllers/buds_lib/dev_utils/proto/gen/nanopb_pb2.py
+++ /dev/null
@@ -1,35 +0,0 @@
-# -*- coding: utf-8 -*-
-# Generated by the protocol buffer compiler.  DO NOT EDIT!
-# source: nanopb.proto
-"""Generated protocol buffer code."""
-from google.protobuf.internal import builder as _builder
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import descriptor_pool as _descriptor_pool
-from google.protobuf import symbol_database as _symbol_database
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2
-
-
-DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0cnanopb.proto\x1a google/protobuf/descriptor.proto\"\x80\x02\n\rNanoPBOptions\x12\x10\n\x08max_size\x18\x01 \x01(\x05\x12\x11\n\tmax_count\x18\x02 \x01(\x05\x12&\n\x08int_size\x18\x07 \x01(\x0e\x32\x08.IntSize:\nIS_DEFAULT\x12$\n\x04type\x18\x03 \x01(\x0e\x32\n.FieldType:\nFT_DEFAULT\x12\x18\n\nlong_names\x18\x04 \x01(\x08:\x04true\x12\x1c\n\rpacked_struct\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x1b\n\x0cskip_message\x18\x06 \x01(\x08:\x05\x66\x61lse\x12\x18\n\tno_unions\x18\x08 \x01(\x08:\x05\x66\x61lse\x12\r\n\x05msgid\x18\t \x01(\r*Z\n\tFieldType\x12\x0e\n\nFT_DEFAULT\x10\x00\x12\x0f\n\x0b\x46T_CALLBACK\x10\x01\x12\x0e\n\nFT_POINTER\x10\x04\x12\r\n\tFT_STATIC\x10\x02\x12\r\n\tFT_IGNORE\x10\x03*D\n\x07IntSize\x12\x0e\n\nIS_DEFAULT\x10\x00\x12\x08\n\x04IS_8\x10\x08\x12\t\n\x05IS_16\x10\x10\x12\t\n\x05IS_32\x10 \x12\t\n\x05IS_64\x10@:E\n\x0enanopb_fileopt\x12\x1c.google.protobuf.FileOptions\x18\xf2\x07 \x01(\x0b\x32\x0e.NanoPBOptions:G\n\rnanopb_msgopt\x12\x1f.google.protobuf.MessageOptions\x18\xf2\x07 \x01(\x0b\x32\x0e.NanoPBOptions:E\n\x0enanopb_enumopt\x12\x1c.google.protobuf.EnumOptions\x18\xf2\x07 \x01(\x0b\x32\x0e.NanoPBOptions:>\n\x06nanopb\x12\x1d.google.protobuf.FieldOptions\x18\xf2\x07 \x01(\x0b\x32\x0e.NanoPBOptionsB\x1a\n\x18\x66i.kapsi.koti.jpa.nanopb')
-
-_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
-_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'nanopb_pb2', globals())
-if _descriptor._USE_C_DESCRIPTORS == False:
-  google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(nanopb_fileopt)
-  google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(nanopb_msgopt)
-  google_dot_protobuf_dot_descriptor__pb2.EnumOptions.RegisterExtension(nanopb_enumopt)
-  google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(nanopb)
-
-  DESCRIPTOR._options = None
-  DESCRIPTOR._serialized_options = b'\n\030fi.kapsi.koti.jpa.nanopb'
-  _FIELDTYPE._serialized_start=309
-  _FIELDTYPE._serialized_end=399
-  _INTSIZE._serialized_start=401
-  _INTSIZE._serialized_end=469
-  _NANOPBOPTIONS._serialized_start=51
-  _NANOPBOPTIONS._serialized_end=307
-# @@protoc_insertion_point(module_scope)
diff --git a/src/antlion/controllers/buds_lib/dev_utils/proto/gen/plugin_pb2.py b/src/antlion/controllers/buds_lib/dev_utils/proto/gen/plugin_pb2.py
deleted file mode 100644
index 79fffcd..0000000
--- a/src/antlion/controllers/buds_lib/dev_utils/proto/gen/plugin_pb2.py
+++ /dev/null
@@ -1,31 +0,0 @@
-# -*- coding: utf-8 -*-
-# Generated by the protocol buffer compiler.  DO NOT EDIT!
-# source: plugin.proto
-"""Generated protocol buffer code."""
-from google.protobuf.internal import builder as _builder
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import descriptor_pool as _descriptor_pool
-from google.protobuf import symbol_database as _symbol_database
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2
-
-
-DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0cplugin.proto\x12\x18google.protobuf.compiler\x1a google/protobuf/descriptor.proto\"}\n\x14\x43odeGeneratorRequest\x12\x18\n\x10\x66ile_to_generate\x18\x01 \x03(\t\x12\x11\n\tparameter\x18\x02 \x01(\t\x12\x38\n\nproto_file\x18\x0f \x03(\x0b\x32$.google.protobuf.FileDescriptorProto\"\xaa\x01\n\x15\x43odeGeneratorResponse\x12\r\n\x05\x65rror\x18\x01 \x01(\t\x12\x42\n\x04\x66ile\x18\x0f \x03(\x0b\x32\x34.google.protobuf.compiler.CodeGeneratorResponse.File\x1a>\n\x04\x46ile\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x17\n\x0finsertion_point\x18\x02 \x01(\t\x12\x0f\n\x07\x63ontent\x18\x0f \x01(\tB,\n\x1c\x63om.google.protobuf.compilerB\x0cPluginProtos')
-
-_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
-_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'plugin_pb2', globals())
-if _descriptor._USE_C_DESCRIPTORS == False:
-
-  DESCRIPTOR._options = None
-  DESCRIPTOR._serialized_options = b'\n\034com.google.protobuf.compilerB\014PluginProtos'
-  _CODEGENERATORREQUEST._serialized_start=76
-  _CODEGENERATORREQUEST._serialized_end=201
-  _CODEGENERATORRESPONSE._serialized_start=204
-  _CODEGENERATORRESPONSE._serialized_end=374
-  _CODEGENERATORRESPONSE_FILE._serialized_start=312
-  _CODEGENERATORRESPONSE_FILE._serialized_end=374
-# @@protoc_insertion_point(module_scope)
diff --git a/src/antlion/controllers/buds_lib/dev_utils/proto/google/protobuf/descriptor.proto b/src/antlion/controllers/buds_lib/dev_utils/proto/google/protobuf/descriptor.proto
deleted file mode 100644
index e17c0cc..0000000
--- a/src/antlion/controllers/buds_lib/dev_utils/proto/google/protobuf/descriptor.proto
+++ /dev/null
@@ -1,714 +0,0 @@
-// Protocol Buffers - Google's data interchange format
-// Copyright 2008 Google Inc.  All rights reserved.
-// https://developers.google.com/protocol-buffers/
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-//     * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-//     * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following disclaimer
-// in the documentation and/or other materials provided with the
-// distribution.
-//     * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived from
-// this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-// Author: kenton@google.com (Kenton Varda)
-//  Based on original Protocol Buffers design by
-//  Sanjay Ghemawat, Jeff Dean, and others.
-//
-// The messages in this file describe the definitions found in .proto files.
-// A valid .proto file can be translated directly to a FileDescriptorProto
-// without any other information (e.g. without reading its imports).
-
-
-syntax = "proto2";
-
-package google.protobuf;
-option java_package = "com.google.protobuf";
-option java_outer_classname = "DescriptorProtos";
-
-// descriptor.proto must be optimized for speed because reflection-based
-// algorithms don't work during bootstrapping.
-option optimize_for = SPEED;
-
-// The protocol compiler can output a FileDescriptorSet containing the .proto
-// files it parses.
-message FileDescriptorSet {
-  repeated FileDescriptorProto file = 1;
-}
-
-// Describes a complete .proto file.
-message FileDescriptorProto {
-  optional string name = 1;       // file name, relative to root of source tree
-  optional string package = 2;    // e.g. "foo", "foo.bar", etc.
-
-  // Names of files imported by this file.
-  repeated string dependency = 3;
-  // Indexes of the public imported files in the dependency list above.
-  repeated int32 public_dependency = 10;
-  // Indexes of the weak imported files in the dependency list.
-  // For Google-internal migration only. Do not use.
-  repeated int32 weak_dependency = 11;
-
-  // All top-level definitions in this file.
-  repeated DescriptorProto message_type = 4;
-  repeated EnumDescriptorProto enum_type = 5;
-  repeated ServiceDescriptorProto service = 6;
-  repeated FieldDescriptorProto extension = 7;
-
-  optional FileOptions options = 8;
-
-  // This field contains optional information about the original source code.
-  // You may safely remove this entire field without harming runtime
-  // functionality of the descriptors -- the information is needed only by
-  // development tools.
-  optional SourceCodeInfo source_code_info = 9;
-
-  // The syntax of the proto file.
-  // The supported values are "proto2" and "proto3".
-  optional string syntax = 12;
-}
-
-// Describes a message type.
-message DescriptorProto {
-  optional string name = 1;
-
-  repeated FieldDescriptorProto field = 2;
-  repeated FieldDescriptorProto extension = 6;
-
-  repeated DescriptorProto nested_type = 3;
-  repeated EnumDescriptorProto enum_type = 4;
-
-  message ExtensionRange {
-    optional int32 start = 1;
-    optional int32 end = 2;
-  }
-  repeated ExtensionRange extension_range = 5;
-
-  repeated OneofDescriptorProto oneof_decl = 8;
-
-  optional MessageOptions options = 7;
-}
-
-// Describes a field within a message.
-message FieldDescriptorProto {
-  enum Type {
-    // 0 is reserved for errors.
-    // Order is weird for historical reasons.
-    TYPE_DOUBLE         = 1;
-    TYPE_FLOAT          = 2;
-    // Not ZigZag encoded.  Negative numbers take 10 bytes.  Use TYPE_SINT64 if
-    // negative values are likely.
-    TYPE_INT64          = 3;
-    TYPE_UINT64         = 4;
-    // Not ZigZag encoded.  Negative numbers take 10 bytes.  Use TYPE_SINT32 if
-    // negative values are likely.
-    TYPE_INT32          = 5;
-    TYPE_FIXED64        = 6;
-    TYPE_FIXED32        = 7;
-    TYPE_BOOL           = 8;
-    TYPE_STRING         = 9;
-    TYPE_GROUP          = 10;  // Tag-delimited aggregate.
-    TYPE_MESSAGE        = 11;  // Length-delimited aggregate.
-
-    // New in version 2.
-    TYPE_BYTES          = 12;
-    TYPE_UINT32         = 13;
-    TYPE_ENUM           = 14;
-    TYPE_SFIXED32       = 15;
-    TYPE_SFIXED64       = 16;
-    TYPE_SINT32         = 17;  // Uses ZigZag encoding.
-    TYPE_SINT64         = 18;  // Uses ZigZag encoding.
-  };
-
-  enum Label {
-    // 0 is reserved for errors
-    LABEL_OPTIONAL      = 1;
-    LABEL_REQUIRED      = 2;
-    LABEL_REPEATED      = 3;
-    // TODO(sanjay): Should we add LABEL_MAP?
-  };
-
-  optional string name = 1;
-  optional int32 number = 3;
-  optional Label label = 4;
-
-  // If type_name is set, this need not be set.  If both this and type_name
-  // are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP.
-  optional Type type = 5;
-
-  // For message and enum types, this is the name of the type.  If the name
-  // starts with a '.', it is fully-qualified.  Otherwise, C++-like scoping
-  // rules are used to find the type (i.e. first the nested types within this
-  // message are searched, then within the parent, on up to the root
-  // namespace).
-  optional string type_name = 6;
-
-  // For extensions, this is the name of the type being extended.  It is
-  // resolved in the same manner as type_name.
-  optional string extendee = 2;
-
-  // For numeric types, contains the original text representation of the value.
-  // For booleans, "true" or "false".
-  // For strings, contains the default text contents (not escaped in any way).
-  // For bytes, contains the C escaped value.  All bytes >= 128 are escaped.
-  // TODO(kenton):  Base-64 encode?
-  optional string default_value = 7;
-
-  // If set, gives the index of a oneof in the containing type's oneof_decl
-  // list.  This field is a member of that oneof.  Extensions of a oneof should
-  // not set this since the oneof to which they belong will be inferred based
-  // on the extension range containing the extension's field number.
-  optional int32 oneof_index = 9;
-
-  optional FieldOptions options = 8;
-}
-
-// Describes a oneof.
-message OneofDescriptorProto {
-  optional string name = 1;
-}
-
-// Describes an enum type.
-message EnumDescriptorProto {
-  optional string name = 1;
-
-  repeated EnumValueDescriptorProto value = 2;
-
-  optional EnumOptions options = 3;
-}
-
-// Describes a value within an enum.
-message EnumValueDescriptorProto {
-  optional string name = 1;
-  optional int32 number = 2;
-
-  optional EnumValueOptions options = 3;
-}
-
-// Describes a service.
-message ServiceDescriptorProto {
-  optional string name = 1;
-  repeated MethodDescriptorProto method = 2;
-
-  optional ServiceOptions options = 3;
-}
-
-// Describes a method of a service.
-message MethodDescriptorProto {
-  optional string name = 1;
-
-  // Input and output type names.  These are resolved in the same way as
-  // FieldDescriptorProto.type_name, but must refer to a message type.
-  optional string input_type = 2;
-  optional string output_type = 3;
-
-  optional MethodOptions options = 4;
-
-  // Identifies if client streams multiple client messages
-  optional bool client_streaming = 5 [default=false];
-  // Identifies if server streams multiple server messages
-  optional bool server_streaming = 6 [default=false];
-}
-
-
-// ===================================================================
-// Options
-
-// Each of the definitions above may have "options" attached.  These are
-// just annotations which may cause code to be generated slightly differently
-// or may contain hints for code that manipulates protocol messages.
-//
-// Clients may define custom options as extensions of the *Options messages.
-// These extensions may not yet be known at parsing time, so the parser cannot
-// store the values in them.  Instead it stores them in a field in the *Options
-// message called uninterpreted_option. This field must have the same name
-// across all *Options messages. We then use this field to populate the
-// extensions when we build a descriptor, at which point all protos have been
-// parsed and so all extensions are known.
-//
-// Extension numbers for custom options may be chosen as follows:
-// * For options which will only be used within a single application or
-//   organization, or for experimental options, use field numbers 50000
-//   through 99999.  It is up to you to ensure that you do not use the
-//   same number for multiple options.
-// * For options which will be published and used publicly by multiple
-//   independent entities, e-mail protobuf-global-extension-registry@google.com
-//   to reserve extension numbers. Simply provide your project name (e.g.
-//   Object-C plugin) and your porject website (if available) -- there's no need
-//   to explain how you intend to use them. Usually you only need one extension
-//   number. You can declare multiple options with only one extension number by
-//   putting them in a sub-message. See the Custom Options section of the docs
-//   for examples:
-//   https://developers.google.com/protocol-buffers/docs/proto#options
-//   If this turns out to be popular, a web service will be set up
-//   to automatically assign option numbers.
-
-
-message FileOptions {
-
-  // Sets the Java package where classes generated from this .proto will be
-  // placed.  By default, the proto package is used, but this is often
-  // inappropriate because proto packages do not normally start with backwards
-  // domain names.
-  optional string java_package = 1;
-
-
-  // If set, all the classes from the .proto file are wrapped in a single
-  // outer class with the given name.  This applies to both Proto1
-  // (equivalent to the old "--one_java_file" option) and Proto2 (where
-  // a .proto always translates to a single class, but you may want to
-  // explicitly choose the class name).
-  optional string java_outer_classname = 8;
-
-  // If set true, then the Java code generator will generate a separate .java
-  // file for each top-level message, enum, and service defined in the .proto
-  // file.  Thus, these types will *not* be nested inside the outer class
-  // named by java_outer_classname.  However, the outer class will still be
-  // generated to contain the file's getDescriptor() method as well as any
-  // top-level extensions defined in the file.
-  optional bool java_multiple_files = 10 [default=false];
-
-  // If set true, then the Java code generator will generate equals() and
-  // hashCode() methods for all messages defined in the .proto file.
-  // - In the full runtime, this is purely a speed optimization, as the
-  // AbstractMessage base class includes reflection-based implementations of
-  // these methods.
-  //- In the lite runtime, setting this option changes the semantics of
-  // equals() and hashCode() to more closely match those of the full runtime;
-  // the generated methods compute their results based on field values rather
-  // than object identity. (Implementations should not assume that hashcodes
-  // will be consistent across runtimes or versions of the protocol compiler.)
-  optional bool java_generate_equals_and_hash = 20 [default=false];
-
-  // If set true, then the Java2 code generator will generate code that
-  // throws an exception whenever an attempt is made to assign a non-UTF-8
-  // byte sequence to a string field.
-  // Message reflection will do the same.
-  // However, an extension field still accepts non-UTF-8 byte sequences.
-  // This option has no effect on when used with the lite runtime.
-  optional bool java_string_check_utf8 = 27 [default=false];
-
-
-  // Generated classes can be optimized for speed or code size.
-  enum OptimizeMode {
-    SPEED = 1;        // Generate complete code for parsing, serialization,
-                      // etc.
-    CODE_SIZE = 2;    // Use ReflectionOps to implement these methods.
-    LITE_RUNTIME = 3; // Generate code using MessageLite and the lite runtime.
-  }
-  optional OptimizeMode optimize_for = 9 [default=SPEED];
-
-  // Sets the Go package where structs generated from this .proto will be
-  // placed. If omitted, the Go package will be derived from the following:
-  //   - The basename of the package import path, if provided.
-  //   - Otherwise, the package statement in the .proto file, if present.
-  //   - Otherwise, the basename of the .proto file, without extension.
-  optional string go_package = 11;
-
-
-
-  // Should generic services be generated in each language?  "Generic" services
-  // are not specific to any particular RPC system.  They are generated by the
-  // main code generators in each language (without additional plugins).
-  // Generic services were the only kind of service generation supported by
-  // early versions of google.protobuf.
-  //
-  // Generic services are now considered deprecated in favor of using plugins
-  // that generate code specific to your particular RPC system.  Therefore,
-  // these default to false.  Old code which depends on generic services should
-  // explicitly set them to true.
-  optional bool cc_generic_services = 16 [default=false];
-  optional bool java_generic_services = 17 [default=false];
-  optional bool py_generic_services = 18 [default=false];
-
-  // Is this file deprecated?
-  // Depending on the target platform, this can emit Deprecated annotations
-  // for everything in the file, or it will be completely ignored; in the very
-  // least, this is a formalization for deprecating files.
-  optional bool deprecated = 23 [default=false];
-
-
-  // Enables the use of arenas for the proto messages in this file. This applies
-  // only to generated classes for C++.
-  optional bool cc_enable_arenas = 31 [default=false];
-
-
-  // The parser stores options it doesn't recognize here. See above.
-  repeated UninterpretedOption uninterpreted_option = 999;
-
-  // Clients can define custom options in extensions of this message. See above.
-  extensions 1000 to max;
-}
-
-message MessageOptions {
-  // Set true to use the old proto1 MessageSet wire format for extensions.
-  // This is provided for backwards-compatibility with the MessageSet wire
-  // format.  You should not use this for any other reason:  It's less
-  // efficient, has fewer features, and is more complicated.
-  //
-  // The message must be defined exactly as follows:
-  //   message Foo {
-  //     option message_set_wire_format = true;
-  //     extensions 4 to max;
-  //   }
-  // Note that the message cannot have any defined fields; MessageSets only
-  // have extensions.
-  //
-  // All extensions of your type must be singular messages; e.g. they cannot
-  // be int32s, enums, or repeated messages.
-  //
-  // Because this is an option, the above two restrictions are not enforced by
-  // the protocol compiler.
-  optional bool message_set_wire_format = 1 [default=false];
-
-  // Disables the generation of the standard "descriptor()" accessor, which can
-  // conflict with a field of the same name.  This is meant to make migration
-  // from proto1 easier; new code should avoid fields named "descriptor".
-  optional bool no_standard_descriptor_accessor = 2 [default=false];
-
-  // Is this message deprecated?
-  // Depending on the target platform, this can emit Deprecated annotations
-  // for the message, or it will be completely ignored; in the very least,
-  // this is a formalization for deprecating messages.
-  optional bool deprecated = 3 [default=false];
-
-  // Whether the message is an automatically generated map entry type for the
-  // maps field.
-  //
-  // For maps fields:
-  //     map<KeyType, ValueType> map_field = 1;
-  // The parsed descriptor looks like:
-  //     message MapFieldEntry {
-  //         option map_entry = true;
-  //         optional KeyType key = 1;
-  //         optional ValueType value = 2;
-  //     }
-  //     repeated MapFieldEntry map_field = 1;
-  //
-  // Implementations may choose not to generate the map_entry=true message, but
-  // use a native map in the target language to hold the keys and values.
-  // The reflection APIs in such implementions still need to work as
-  // if the field is a repeated message field.
-  //
-  // NOTE: Do not set the option in .proto files. Always use the maps syntax
-  // instead. The option should only be implicitly set by the proto compiler
-  // parser.
-  optional bool map_entry = 7;
-
-  // The parser stores options it doesn't recognize here. See above.
-  repeated UninterpretedOption uninterpreted_option = 999;
-
-  // Clients can define custom options in extensions of this message. See above.
-  extensions 1000 to max;
-}
-
-message FieldOptions {
-  // The ctype option instructs the C++ code generator to use a different
-  // representation of the field than it normally would.  See the specific
-  // options below.  This option is not yet implemented in the open source
-  // release -- sorry, we'll try to include it in a future version!
-  optional CType ctype = 1 [default = STRING];
-  enum CType {
-    // Default mode.
-    STRING = 0;
-
-    CORD = 1;
-
-    STRING_PIECE = 2;
-  }
-  // The packed option can be enabled for repeated primitive fields to enable
-  // a more efficient representation on the wire. Rather than repeatedly
-  // writing the tag and type for each element, the entire array is encoded as
-  // a single length-delimited blob.
-  optional bool packed = 2;
-
-
-
-  // Should this field be parsed lazily?  Lazy applies only to message-type
-  // fields.  It means that when the outer message is initially parsed, the
-  // inner message's contents will not be parsed but instead stored in encoded
-  // form.  The inner message will actually be parsed when it is first accessed.
-  //
-  // This is only a hint.  Implementations are free to choose whether to use
-  // eager or lazy parsing regardless of the value of this option.  However,
-  // setting this option true suggests that the protocol author believes that
-  // using lazy parsing on this field is worth the additional bookkeeping
-  // overhead typically needed to implement it.
-  //
-  // This option does not affect the public interface of any generated code;
-  // all method signatures remain the same.  Furthermore, thread-safety of the
-  // interface is not affected by this option; const methods remain safe to
-  // call from multiple threads concurrently, while non-const methods continue
-  // to require exclusive access.
-  //
-  //
-  // Note that implementations may choose not to check required fields within
-  // a lazy sub-message.  That is, calling IsInitialized() on the outher message
-  // may return true even if the inner message has missing required fields.
-  // This is necessary because otherwise the inner message would have to be
-  // parsed in order to perform the check, defeating the purpose of lazy
-  // parsing.  An implementation which chooses not to check required fields
-  // must be consistent about it.  That is, for any particular sub-message, the
-  // implementation must either *always* check its required fields, or *never*
-  // check its required fields, regardless of whether or not the message has
-  // been parsed.
-  optional bool lazy = 5 [default=false];
-
-  // Is this field deprecated?
-  // Depending on the target platform, this can emit Deprecated annotations
-  // for accessors, or it will be completely ignored; in the very least, this
-  // is a formalization for deprecating fields.
-  optional bool deprecated = 3 [default=false];
-
-  // For Google-internal migration only. Do not use.
-  optional bool weak = 10 [default=false];
-
-
-
-  // The parser stores options it doesn't recognize here. See above.
-  repeated UninterpretedOption uninterpreted_option = 999;
-
-  // Clients can define custom options in extensions of this message. See above.
-  extensions 1000 to max;
-}
-
-message EnumOptions {
-
-  // Set this option to true to allow mapping different tag names to the same
-  // value.
-  optional bool allow_alias = 2;
-
-  // Is this enum deprecated?
-  // Depending on the target platform, this can emit Deprecated annotations
-  // for the enum, or it will be completely ignored; in the very least, this
-  // is a formalization for deprecating enums.
-  optional bool deprecated = 3 [default=false];
-
-  // The parser stores options it doesn't recognize here. See above.
-  repeated UninterpretedOption uninterpreted_option = 999;
-
-  // Clients can define custom options in extensions of this message. See above.
-  extensions 1000 to max;
-}
-
-message EnumValueOptions {
-  // Is this enum value deprecated?
-  // Depending on the target platform, this can emit Deprecated annotations
-  // for the enum value, or it will be completely ignored; in the very least,
-  // this is a formalization for deprecating enum values.
-  optional bool deprecated = 1 [default=false];
-
-  // The parser stores options it doesn't recognize here. See above.
-  repeated UninterpretedOption uninterpreted_option = 999;
-
-  // Clients can define custom options in extensions of this message. See above.
-  extensions 1000 to max;
-}
-
-message ServiceOptions {
-
-  // Note:  Field numbers 1 through 32 are reserved for Google's internal RPC
-  //   framework.  We apologize for hoarding these numbers to ourselves, but
-  //   we were already using them long before we decided to release Protocol
-  //   Buffers.
-
-  // Is this service deprecated?
-  // Depending on the target platform, this can emit Deprecated annotations
-  // for the service, or it will be completely ignored; in the very least,
-  // this is a formalization for deprecating services.
-  optional bool deprecated = 33 [default=false];
-
-  // The parser stores options it doesn't recognize here. See above.
-  repeated UninterpretedOption uninterpreted_option = 999;
-
-  // Clients can define custom options in extensions of this message. See above.
-  extensions 1000 to max;
-}
-
-message MethodOptions {
-
-  // Note:  Field numbers 1 through 32 are reserved for Google's internal RPC
-  //   framework.  We apologize for hoarding these numbers to ourselves, but
-  //   we were already using them long before we decided to release Protocol
-  //   Buffers.
-
-  // Is this method deprecated?
-  // Depending on the target platform, this can emit Deprecated annotations
-  // for the method, or it will be completely ignored; in the very least,
-  // this is a formalization for deprecating methods.
-  optional bool deprecated = 33 [default=false];
-
-  // The parser stores options it doesn't recognize here. See above.
-  repeated UninterpretedOption uninterpreted_option = 999;
-
-  // Clients can define custom options in extensions of this message. See above.
-  extensions 1000 to max;
-}
-
-
-// A message representing a option the parser does not recognize. This only
-// appears in options protos created by the compiler::Parser class.
-// DescriptorPool resolves these when building Descriptor objects. Therefore,
-// options protos in descriptor objects (e.g. returned by Descriptor::options(),
-// or produced by Descriptor::CopyTo()) will never have UninterpretedOptions
-// in them.
-message UninterpretedOption {
-  // The name of the uninterpreted option.  Each string represents a segment in
-  // a dot-separated name.  is_extension is true iff a segment represents an
-  // extension (denoted with parentheses in options specs in .proto files).
-  // E.g.,{ ["foo", false], ["bar.baz", true], ["qux", false] } represents
-  // "foo.(bar.baz).qux".
-  message NamePart {
-    required string name_part = 1;
-    required bool is_extension = 2;
-  }
-  repeated NamePart name = 2;
-
-  // The value of the uninterpreted option, in whatever type the tokenizer
-  // identified it as during parsing. Exactly one of these should be set.
-  optional string identifier_value = 3;
-  optional uint64 positive_int_value = 4;
-  optional int64 negative_int_value = 5;
-  optional double double_value = 6;
-  optional bytes string_value = 7;
-  optional string aggregate_value = 8;
-}
-
-// ===================================================================
-// Optional source code info
-
-// Encapsulates information about the original source file from which a
-// FileDescriptorProto was generated.
-message SourceCodeInfo {
-  // A Location identifies a piece of source code in a .proto file which
-  // corresponds to a particular definition.  This information is intended
-  // to be useful to IDEs, code indexers, documentation generators, and similar
-  // tools.
-  //
-  // For example, say we have a file like:
-  //   message Foo {
-  //     optional string foo = 1;
-  //   }
-  // Let's look at just the field definition:
-  //   optional string foo = 1;
-  //   ^       ^^     ^^  ^  ^^^
-  //   a       bc     de  f  ghi
-  // We have the following locations:
-  //   span   path               represents
-  //   [a,i)  [ 4, 0, 2, 0 ]     The whole field definition.
-  //   [a,b)  [ 4, 0, 2, 0, 4 ]  The label (optional).
-  //   [c,d)  [ 4, 0, 2, 0, 5 ]  The type (string).
-  //   [e,f)  [ 4, 0, 2, 0, 1 ]  The name (foo).
-  //   [g,h)  [ 4, 0, 2, 0, 3 ]  The number (1).
-  //
-  // Notes:
-  // - A location may refer to a repeated field itself (i.e. not to any
-  //   particular index within it).  This is used whenever a set of elements are
-  //   logically enclosed in a single code segment.  For example, an entire
-  //   extend block (possibly containing multiple extension definitions) will
-  //   have an outer location whose path refers to the "extensions" repeated
-  //   field without an index.
-  // - Multiple locations may have the same path.  This happens when a single
-  //   logical declaration is spread out across multiple places.  The most
-  //   obvious example is the "extend" block again -- there may be multiple
-  //   extend blocks in the same scope, each of which will have the same path.
-  // - A location's span is not always a subset of its parent's span.  For
-  //   example, the "extendee" of an extension declaration appears at the
-  //   beginning of the "extend" block and is shared by all extensions within
-  //   the block.
-  // - Just because a location's span is a subset of some other location's span
-  //   does not mean that it is a descendent.  For example, a "group" defines
-  //   both a type and a field in a single declaration.  Thus, the locations
-  //   corresponding to the type and field and their components will overlap.
-  // - Code which tries to interpret locations should probably be designed to
-  //   ignore those that it doesn't understand, as more types of locations could
-  //   be recorded in the future.
-  repeated Location location = 1;
-  message Location {
-    // Identifies which part of the FileDescriptorProto was defined at this
-    // location.
-    //
-    // Each element is a field number or an index.  They form a path from
-    // the root FileDescriptorProto to the place where the definition.  For
-    // example, this path:
-    //   [ 4, 3, 2, 7, 1 ]
-    // refers to:
-    //   file.message_type(3)  // 4, 3
-    //       .field(7)         // 2, 7
-    //       .name()           // 1
-    // This is because FileDescriptorProto.message_type has field number 4:
-    //   repeated DescriptorProto message_type = 4;
-    // and DescriptorProto.field has field number 2:
-    //   repeated FieldDescriptorProto field = 2;
-    // and FieldDescriptorProto.name has field number 1:
-    //   optional string name = 1;
-    //
-    // Thus, the above path gives the location of a field name.  If we removed
-    // the last element:
-    //   [ 4, 3, 2, 7 ]
-    // this path refers to the whole field declaration (from the beginning
-    // of the label to the terminating semicolon).
-    repeated int32 path = 1 [packed=true];
-
-    // Always has exactly three or four elements: start line, start column,
-    // end line (optional, otherwise assumed same as start line), end column.
-    // These are packed into a single field for efficiency.  Note that line
-    // and column numbers are zero-based -- typically you will want to add
-    // 1 to each before displaying to a user.
-    repeated int32 span = 2 [packed=true];
-
-    // If this SourceCodeInfo represents a complete declaration, these are any
-    // comments appearing before and after the declaration which appear to be
-    // attached to the declaration.
-    //
-    // A series of line comments appearing on consecutive lines, with no other
-    // tokens appearing on those lines, will be treated as a single comment.
-    //
-    // Only the comment content is provided; comment markers (e.g. //) are
-    // stripped out.  For block comments, leading whitespace and an asterisk
-    // will be stripped from the beginning of each line other than the first.
-    // Newlines are included in the output.
-    //
-    // Examples:
-    //
-    //   optional int32 foo = 1;  // Comment attached to foo.
-    //   // Comment attached to bar.
-    //   optional int32 bar = 2;
-    //
-    //   optional string baz = 3;
-    //   // Comment attached to baz.
-    //   // Another line attached to baz.
-    //
-    //   // Comment attached to qux.
-    //   //
-    //   // Another line attached to qux.
-    //   optional double qux = 4;
-    //
-    //   optional string corge = 5;
-    //   /* Block comment attached
-    //    * to corge.  Leading asterisks
-    //    * will be removed. */
-    //   /* Block comment attached to
-    //    * grault. */
-    //   optional int32 grault = 6;
-    optional string leading_comments = 3;
-    optional string trailing_comments = 4;
-  }
-}
diff --git a/src/antlion/controllers/buds_lib/dev_utils/proto/nanopb.proto b/src/antlion/controllers/buds_lib/dev_utils/proto/nanopb.proto
deleted file mode 100644
index 5053dfd..0000000
--- a/src/antlion/controllers/buds_lib/dev_utils/proto/nanopb.proto
+++ /dev/null
@@ -1,89 +0,0 @@
-// Custom options for defining:
-// - Maximum size of string/bytes
-// - Maximum number of elements in array
-//
-// These are used by nanopb to generate statically allocable structures
-// for memory-limited environments.
-
-syntax = "proto2";
-import "google/protobuf/descriptor.proto";
-
-option java_package = "fi.kapsi.koti.jpa.nanopb";
-
-enum FieldType {
-    FT_DEFAULT = 0; // Automatically decide field type, generate static field if possible.
-    FT_CALLBACK = 1; // Always generate a callback field.
-    FT_POINTER = 4; // Always generate a dynamically allocated field.
-    FT_STATIC = 2; // Generate a static field or raise an exception if not possible.
-    FT_IGNORE = 3; // Ignore the field completely.
-}
-
-enum IntSize {
-    IS_DEFAULT = 0; // Default, 32/64bit based on type in .proto
-    IS_8 = 8;
-    IS_16 = 16;
-    IS_32 = 32;
-    IS_64 = 64;
-}
-
-// This is the inner options message, which basically defines options for
-// a field. When it is used in message or file scope, it applies to all
-// fields.
-message NanoPBOptions {
-  // Allocated size for 'bytes' and 'string' fields.
-  optional int32 max_size = 1;
-
-  // Allocated number of entries in arrays ('repeated' fields)
-  optional int32 max_count = 2;
-
-  // Size of integer fields. Can save some memory if you don't need
-  // full 32 bits for the value.
-  optional IntSize int_size = 7 [default = IS_DEFAULT];
-
-  // Force type of field (callback or static allocation)
-  optional FieldType type = 3 [default = FT_DEFAULT];
-
-  // Use long names for enums, i.e. EnumName_EnumValue.
-  optional bool long_names = 4 [default = true];
-
-  // Add 'packed' attribute to generated structs.
-  // Note: this cannot be used on CPUs that break on unaligned
-  // accesses to variables.
-  optional bool packed_struct = 5 [default = false];
-
-  // Skip this message
-  optional bool skip_message = 6 [default = false];
-
-  // Generate oneof fields as normal optional fields instead of union.
-  optional bool no_unions = 8 [default = false];
-
-  // integer type tag for a message
-  optional uint32 msgid = 9;
-}
-
-// Extensions to protoc 'Descriptor' type in order to define options
-// inside a .proto file.
-//
-// Protocol Buffers extension number registry
-// --------------------------------
-// Project:  Nanopb
-// Contact:  Petteri Aimonen <jpa@kapsi.fi>
-// Web site: http://kapsi.fi/~jpa/nanopb
-// Extensions: 1010 (all types)
-// --------------------------------
-
-extend google.protobuf.FileOptions {
-    optional NanoPBOptions nanopb_fileopt = 1010;
-}
-
-extend google.protobuf.MessageOptions {
-    optional NanoPBOptions nanopb_msgopt = 1010;
-}
-
-extend google.protobuf.EnumOptions {
-    optional NanoPBOptions nanopb_enumopt = 1010;
-}
-
-extend google.protobuf.FieldOptions {
-    optional NanoPBOptions nanopb = 1010;
-}
diff --git a/src/antlion/controllers/buds_lib/dev_utils/proto/plugin.proto b/src/antlion/controllers/buds_lib/dev_utils/proto/plugin.proto
deleted file mode 100644
index e627289..0000000
--- a/src/antlion/controllers/buds_lib/dev_utils/proto/plugin.proto
+++ /dev/null
@@ -1,148 +0,0 @@
-// Protocol Buffers - Google's data interchange format
-// Copyright 2008 Google Inc.  All rights reserved.
-// https://developers.google.com/protocol-buffers/
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-//     * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-//     * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following disclaimer
-// in the documentation and/or other materials provided with the
-// distribution.
-//     * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived from
-// this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-// Author: kenton@google.com (Kenton Varda)
-//
-// WARNING:  The plugin interface is currently EXPERIMENTAL and is subject to
-//   change.
-//
-// protoc (aka the Protocol Compiler) can be extended via plugins.  A plugin is
-// just a program that reads a CodeGeneratorRequest from stdin and writes a
-// CodeGeneratorResponse to stdout.
-//
-// Plugins written using C++ can use google/protobuf/compiler/plugin.h instead
-// of dealing with the raw protocol defined here.
-//
-// A plugin executable needs only to be placed somewhere in the path.  The
-// plugin should be named "protoc-gen-$NAME", and will then be used when the
-// flag "--${NAME}_out" is passed to protoc.
-
-syntax = "proto2";
-package google.protobuf.compiler;
-option java_package = "com.google.protobuf.compiler";
-option java_outer_classname = "PluginProtos";
-
-import "google/protobuf/descriptor.proto";
-
-// An encoded CodeGeneratorRequest is written to the plugin's stdin.
-message CodeGeneratorRequest {
-  // The .proto files that were explicitly listed on the command-line.  The
-  // code generator should generate code only for these files.  Each file's
-  // descriptor will be included in proto_file, below.
-  repeated string file_to_generate = 1;
-
-  // The generator parameter passed on the command-line.
-  optional string parameter = 2;
-
-  // FileDescriptorProtos for all files in files_to_generate and everything
-  // they import.  The files will appear in topological order, so each file
-  // appears before any file that imports it.
-  //
-  // protoc guarantees that all proto_files will be written after
-  // the fields above, even though this is not technically guaranteed by the
-  // protobuf wire format.  This theoretically could allow a plugin to stream
-  // in the FileDescriptorProtos and handle them one by one rather than read
-  // the entire set into memory at once.  However, as of this writing, this
-  // is not similarly optimized on protoc's end -- it will store all fields in
-  // memory at once before sending them to the plugin.
-  repeated FileDescriptorProto proto_file = 15;
-}
-
-// The plugin writes an encoded CodeGeneratorResponse to stdout.
-message CodeGeneratorResponse {
-  // Error message.  If non-empty, code generation failed.  The plugin process
-  // should exit with status code zero even if it reports an error in this way.
-  //
-  // This should be used to indicate errors in .proto files which prevent the
-  // code generator from generating correct code.  Errors which indicate a
-  // problem in protoc itself -- such as the input CodeGeneratorRequest being
-  // unparseable -- should be reported by writing a message to stderr and
-  // exiting with a non-zero status code.
-  optional string error = 1;
-
-  // Represents a single generated file.
-  message File {
-    // The file name, relative to the output directory.  The name must not
-    // contain "." or ".." components and must be relative, not be absolute (so,
-    // the file cannot lie outside the output directory).  "/" must be used as
-    // the path separator, not "\".
-    //
-    // If the name is omitted, the content will be appended to the previous
-    // file.  This allows the generator to break large files into small chunks,
-    // and allows the generated text to be streamed back to protoc so that large
-    // files need not reside completely in memory at one time.  Note that as of
-    // this writing protoc does not optimize for this -- it will read the entire
-    // CodeGeneratorResponse before writing files to disk.
-    optional string name = 1;
-
-    // If non-empty, indicates that the named file should already exist, and the
-    // content here is to be inserted into that file at a defined insertion
-    // point.  This feature allows a code generator to extend the output
-    // produced by another code generator.  The original generator may provide
-    // insertion points by placing special annotations in the file that look
-    // like:
-    //   @@protoc_insertion_point(NAME)
-    // The annotation can have arbitrary text before and after it on the line,
-    // which allows it to be placed in a comment.  NAME should be replaced with
-    // an identifier naming the point -- this is what other generators will use
-    // as the insertion_point.  Code inserted at this point will be placed
-    // immediately above the line containing the insertion point (thus multiple
-    // insertions to the same point will come out in the order they were added).
-    // The double-@ is intended to make it unlikely that the generated code
-    // could contain things that look like insertion points by accident.
-    //
-    // For example, the C++ code generator places the following line in the
-    // .pb.h files that it generates:
-    //   // @@protoc_insertion_point(namespace_scope)
-    // This line appears within the scope of the file's package namespace, but
-    // outside of any particular class.  Another plugin can then specify the
-    // insertion_point "namespace_scope" to generate additional classes or
-    // other declarations that should be placed in this scope.
-    //
-    // Note that if the line containing the insertion point begins with
-    // whitespace, the same whitespace will be added to every line of the
-    // inserted text.  This is useful for languages like Python, where
-    // indentation matters.  In these languages, the insertion point comment
-    // should be indented the same amount as any inserted code will need to be
-    // in order to work correctly in that context.
-    //
-    // The code generator that generates the initial file and the one which
-    // inserts into it must both run as part of a single invocation of protoc.
-    // Code generators are executed in the order in which they appear on the
-    // command line.
-    //
-    // If |insertion_point| is present, |name| must also be present.
-    optional string insertion_point = 2;
-
-    // The file contents.
-    optional string content = 15;
-  }
-  repeated File file = 15;
-}
diff --git a/src/antlion/controllers/buds_lib/latency.py b/src/antlion/controllers/buds_lib/latency.py
deleted file mode 100644
index 1cd9c8d..0000000
--- a/src/antlion/controllers/buds_lib/latency.py
+++ /dev/null
@@ -1,281 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Processes profiling data to output latency numbers."""
-#
-# Type "python latency.py -h" for help
-#
-# Currently the log data is assumed to be in the following format:
-# PROF:<event-id> <timestamp>
-# The <event-id> and <timestamp> can be in the form of any valid
-# (positive)integer literal in Python
-# Examples:
-#   PROF:0x0006 0x00000155e0d043f1
-#   PROF:6 1468355593201
-
-import argparse
-from collections import defaultdict
-import csv
-import logging
-import math
-import os
-import string
-import xml.etree.ElementTree as ET
-
-valid_fname_chars = '-_.()%s%s' % (string.ascii_letters, string.digits)
-PERCENTILE_STEP = 1
-PROFILER_DATA_PREFIX = 'PROF:'
-
-
-class EventPair(object):
-
-    def __init__(self, pair_id, latency, name):
-        self.pair_id = pair_id
-        self.latency = latency
-        self.name = name
-
-
-class LatencyEntry(object):
-
-    def __init__(self, start_timestamp, latency):
-        self.start_timestamp = start_timestamp
-        self.latency = latency
-
-
-def parse_xml(xml_file):
-    """
-    Parse the configuration xml file.
-
-    Returns:
-      event_pairs_by_pair_id: dict mapping event id to event pair object
-      event_pairs_by_start_id: dict mapping starting event to list of event pairs
-                               with that starting event.
-      event_pairs_by_end_id: dict mapping ending event to list of event pairs
-                             with that ending event.
-    """
-    root = ET.parse(xml_file).getroot()
-    event_pairs = root.findall('event-pair')
-    event_pairs_by_pair_id = {}
-    event_pairs_by_start_id = defaultdict(list)
-    event_pairs_by_end_id = defaultdict(list)
-
-    for event_pair in event_pairs:
-        start_evt = root.find(
-            "./event[@id='{0:}']".format(event_pair.attrib['start-event']))
-        end_evt = root.find(
-            "./event[@id='{0:}']".format(event_pair.attrib['end-event']))
-        start = int(start_evt.attrib['id'], 0)
-        end = int(end_evt.attrib['id'], 0)
-        paird_id = start << 32 | end
-        if paird_id in event_pairs_by_pair_id:
-            logging.error('Latency event repeated: start id = %d, end id = %d',
-                          start,
-                          end)
-            continue
-        # Create the output file name base by concatenating:
-        # "input file name base" + start event name + "_to_" + end event name
-        evt_pair_name = start_evt.attrib['name'] + '_to_' + end_evt.attrib[
-            'name']
-        evt_pair_name = [
-            c if c in valid_fname_chars else '_' for c in evt_pair_name
-        ]
-        evt_pair_name = ''.join(evt_pair_name)
-        evt_list = EventPair(paird_id, 0, evt_pair_name)
-        event_pairs_by_pair_id[paird_id] = evt_list
-        event_pairs_by_start_id[start].append(evt_list)
-        event_pairs_by_end_id[end].append(evt_list)
-    return (event_pairs_by_pair_id, event_pairs_by_start_id,
-            event_pairs_by_end_id)
-
-
-def percentile_to_index(num_entries, percentile):
-    """
-    Returns the index in an array corresponding to a percentile.
-
-    Arguments:
-      num_entries: the number of entries in the array.
-      percentile: which percentile to calculate the index for.
-    Returns:
-      ind: the index in the array corresponding to the percentile.
-    """
-    ind = int(math.floor(float(num_entries) * percentile / 100))
-    if ind > 0:
-        ind -= 1
-    return ind
-
-
-def compute_latencies(input_file, event_pairs_by_start_id,
-                      event_pairs_by_end_id):
-    """Parse the input data file and compute latencies."""
-    line_num = 0
-    lat_tables_by_pair_id = defaultdict(list)
-    while True:
-        line_num += 1
-        line = input_file.readline()
-        if not line:
-            break
-        data = line.partition(PROFILER_DATA_PREFIX)[2]
-        if not data:
-            continue
-        try:
-            event_id, timestamp = [int(x, 0) for x in data.split()]
-        except ValueError:
-            logging.error('Badly formed event entry at line #%s: %s', line_num,
-                          line)
-            continue
-        # We use event_pair.latency to temporarily store the timestamp
-        # of the start event
-        for event_pair in event_pairs_by_start_id[event_id]:
-            event_pair.latency = timestamp
-        for event_pair in event_pairs_by_end_id[event_id]:
-            # compute the latency only if we have seen the corresponding
-            # start event already
-            if event_pair.latency:
-                lat_tables_by_pair_id[event_pair.pair_id].append(
-                    LatencyEntry(event_pair.latency,
-                                 timestamp - event_pair.latency))
-                event_pair.latency = 0
-    return lat_tables_by_pair_id
-
-
-def write_data(fname_base, event_pairs_by_pair_id, lat_tables_by_pair_id):
-    for event_id, lat_table in lat_tables_by_pair_id.items():
-        event_pair = event_pairs_by_pair_id[event_id]
-        with open(fname_base + '_' + event_pair.name + '_data.csv',
-                  'wb') as out_file:
-            csv_writer = csv.writer(out_file)
-            for dat in lat_table:
-                csv_writer.writerow([dat.start_timestamp, dat.latency])
-
-
-def write_summary(fname_base, event_pairs_by_pair_id, lat_tables_by_pair_id):
-    summaries = get_summaries(event_pairs_by_pair_id, lat_tables_by_pair_id)
-    for event_id, lat_table in lat_tables_by_pair_id.items():
-        event_pair = event_pairs_by_pair_id[event_id]
-        summary = summaries[event_pair.name]
-        latencies = summary['latencies']
-        num_latencies = summary['num_latencies']
-        with open(fname_base + '_' + event_pair.name + '_summary.txt',
-                  'wb') as out_file:
-            csv_writer = csv.writer(out_file)
-            csv_writer.writerow(['Percentile', 'Latency'])
-            # Write percentile table
-            for percentile in range(1, 101):
-                ind = percentile_to_index(num_latencies, percentile)
-                csv_writer.writerow([percentile, latencies[ind]])
-
-            # Write summary
-            print('\n\nTotal number of samples = {}'.format(num_latencies),
-                  file=out_file)
-            print('Min = {}'.format(summary['min_lat']), file=out_file)
-            print('Max = {}'.format(summary['max_lat']), file=out_file)
-            print('Average = {}'.format(summary['average_lat']), file=out_file)
-            print('Median = {}'.format(summary['median_lat']), file=out_file)
-            print('90 %ile = {}'.format(summary['90pctile']), file=out_file)
-            print('95 %ile = {}'.format(summary['95pctile']), file=out_file)
-
-
-def process_latencies(config_xml, input_file):
-    """
-    End to end function to compute latencies and summaries from input file.
-    Writes latency results to files in current directory.
-
-    Arguments:
-       config_xml: xml file specifying which event pairs to compute latency
-                   btwn.
-       input_file: text file containing the timestamped events, like a log file.
-    """
-    # Parse the event configuration file
-    (event_pairs_by_pair_id, event_pairs_by_start_id,
-     event_pairs_by_end_id) = parse_xml(config_xml)
-    # Compute latencies
-    lat_tables_by_pair_id = compute_latencies(input_file,
-                                              event_pairs_by_start_id,
-                                              event_pairs_by_end_id)
-    fname_base = os.path.splitext(os.path.basename(input_file.name))[0]
-    # Write the latency data and summary to respective files
-    write_data(fname_base, event_pairs_by_pair_id, lat_tables_by_pair_id)
-    write_summary(fname_base, event_pairs_by_pair_id, lat_tables_by_pair_id)
-
-
-def get_summaries(event_pairs_by_pair_id, lat_tables_by_pair_id):
-    """
-    Process significant summaries from a table of latencies.
-
-    Arguments:
-      event_pairs_by_pair_id: dict mapping event id to event pair object
-      lat_tables_by_pair_id: dict mapping event id to latency table
-    Returns:
-      summaries: dict mapping event pair name to significant summary metrics.
-    """
-    summaries = {}
-    for event_id, lat_table in lat_tables_by_pair_id.items():
-        event_summary = {}
-        event_pair = event_pairs_by_pair_id[event_id]
-        latencies = [entry.latency for entry in lat_table]
-        latencies.sort()
-        event_summary['latencies'] = latencies
-        event_summary['num_latencies'] = len(latencies)
-        event_summary['min_lat'] = latencies[0]
-        event_summary['max_lat'] = latencies[-1]
-        event_summary['average_lat'] = sum(latencies) / len(latencies)
-        event_summary['median'] = latencies[len(latencies) // 2]
-        event_summary['90pctile'] = latencies[percentile_to_index(
-            len(latencies), 90)]
-        event_summary['95pctile'] = latencies[percentile_to_index(
-            len(latencies), 95)]
-        summaries[event_pair.name] = event_summary
-    return summaries
-
-
-def get_summaries_from_log(input_file_name, config_xml=None):
-    """
-    End to end function to compute latencies and summaries from input file.
-    Returns a summary dictionary.
-
-    Arguments:
-      input_file_name: text file containing the timestamped events, like a
-                       log file.
-      config_xml: xml file specifying which event pairs to compute latency btwn.
-    Returns:
-      summaries: dict mapping event pair name to significant summary metrics.
-    """
-    config_xml = config_xml or os.path.join(os.path.dirname(__file__),
-                                            'latency.xml')
-    (event_pairs_by_pair_id, event_pairs_by_start_id,
-     event_pairs_by_end_id) = parse_xml(config_xml)
-    # Compute latencies
-    input_file = open(input_file_name, 'r')
-    lat_tables_by_pair_id = compute_latencies(input_file,
-                                              event_pairs_by_start_id,
-                                              event_pairs_by_end_id)
-    return get_summaries(event_pairs_by_pair_id, lat_tables_by_pair_id)
-
-
-if __name__ == '__main__':
-    # Parse command-line arguments
-    parser = argparse.ArgumentParser(
-        description='Processes profiling data to output latency numbers')
-    parser.add_argument(
-        '--events-config',
-        type=argparse.FileType('r'),
-        default=os.path.join(os.path.dirname(__file__), 'latency.xml'),
-        help='The configuration XML file for events.'
-             ' If not specified uses latency.xml from current folder')
-    parser.add_argument(
-        'input', type=argparse.FileType('r'), help='The input log')
-    args = parser.parse_args()
-    process_latencies(args.events_config, args.input)
diff --git a/src/antlion/controllers/buds_lib/latency.xml b/src/antlion/controllers/buds_lib/latency.xml
deleted file mode 100644
index 320979b..0000000
--- a/src/antlion/controllers/buds_lib/latency.xml
+++ /dev/null
@@ -1,22 +0,0 @@
-<?xml version="1.0"?>
-<root>
-    <event id="1" name="Button Down" />
-    <event id="3" name="Play/Pause Button Event" />
-    <event id="4" name="A2DP Start Ind" />
-    <event id="6" name="A2DP Start Streaming" />
-    <event id="9" name="AVRCP Play Notification" />
-    <event id="10" name="AVRCP Pause Notification" />
-    <event id="12" name="Voice Cmd Btn Held" />
-    <event id="13" name="Voice Cmd Btn Released" />
-
-    <!-- Event pairs that we are interested in measuring the latency of -->
-    <event-pair start-event="1" end-event="3" />
-    <event-pair start-event="1" end-event="12" />
-    <event-pair start-event="3" end-event="9" />
-    <event-pair start-event="9" end-event="6" />
-    <event-pair start-event="1" end-event="6" />
-    <event-pair start-event="3" end-event="10" />
-    <event-pair start-event="1" end-event="10" />
-    <event-pair start-event="12" end-event="13" />
-    <event-pair start-event="13" end-event="6" />
-</root>
diff --git a/src/antlion/controllers/buds_lib/logserial.py b/src/antlion/controllers/buds_lib/logserial.py
deleted file mode 100644
index 7b71f8d..0000000
--- a/src/antlion/controllers/buds_lib/logserial.py
+++ /dev/null
@@ -1,409 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import os
-import re
-import select
-import subprocess
-import sys
-import time
-import uuid
-from logging import Logger
-from threading import Thread
-
-import serial
-from serial.tools import list_ports
-
-from antlion.controllers.buds_lib import tako_trace_logger
-
-logging = tako_trace_logger.TakoTraceLogger(Logger(__file__))
-
-RETRIES = 0
-
-
-class LogSerialException(Exception):
-    """LogSerial Exception."""
-
-
-class PortCheck(object):
-    def get_serial_ports(self):
-        """Gets the computer available serial ports.
-
-        Returns:
-            Dictionary object with all the serial port names.
-        """
-        result = {}
-        ports = list_ports.comports()
-        for port_name, description, address in ports:
-            result[port_name] = (description, address)
-        return result
-
-    # TODO: Clean up this function. The boolean logic can be simplified.
-    def search_port_by_property(self, search_params):
-        """Search ports by a dictionary of the search parameters.
-
-        Args:
-            search_params: Dictionary object with the parameters
-                           to search. i.e:
-                           {'ID_SERIAL_SHORT':'213213',
-                           'ID_USB_INTERFACE_NUM': '01'}
-        Returns:
-            Array with the ports found
-        """
-        ports_result = []
-        for port in self.get_serial_ports():
-            properties = self.get_port_properties(port=port)
-            if properties:
-                properties_exists = True
-                for port_property in search_params:
-                    properties_exists *= (port_property in properties)
-                properties_exists = True if properties_exists == 1 else False
-                if properties_exists:
-                    found = True
-                    for port_property in search_params.keys():
-                        search_value = search_params[port_property]
-                        if properties[port_property] == search_value:
-                            found *= True
-                        else:
-                            found = False
-                            break
-                    found = True if found == 1 else False
-                    if found:
-                        ports_result.append(port)
-        return ports_result
-
-    def get_port_properties(self, port):
-        """Get all the properties from a given port.
-
-        Args:
-            port: String object with the port name. i.e. '/dev/ttyACM1'
-
-        Returns:
-            dictionary object with all the properties.
-        """
-        ports = self.get_serial_ports()
-        if port in ports:
-            result = {}
-            port_address = ports[port][1]
-            property_list = None
-            if sys.platform.startswith('linux') or sys.platform.startswith(
-                    'cygwin'):
-                try:
-                    command = 'udevadm info -q property -n {}'.format(port)
-                    property_list = subprocess.check_output(command, shell=True)
-                    property_list = property_list.decode(errors='replace')
-                except subprocess.CalledProcessError as error:
-                    logging.error(error)
-                if property_list:
-                    properties = filter(None, property_list.split('\n'))
-                    for prop in properties:
-                        p = prop.split('=')
-                        result[p[0]] = p[1]
-            elif sys.platform.startswith('win'):
-                regex = ('(?P<type>[A-Z]*)\sVID\:PID\=(?P<vid>\w*)'
-                         '\:(?P<pid>\w*)\s+(?P<adprop>.*$)')
-                m = re.search(regex, port_address)
-                if m:
-                    result['type'] = m.group('type')
-                    result['vid'] = m.group('vid')
-                    result['pid'] = m.group('pid')
-                    adprop = m.group('adprop').strip()
-                    if adprop:
-                        prop_array = adprop.split(' ')
-                        for prop in prop_array:
-                            p = prop.split('=')
-                            result[p[0]] = p[1]
-                    if 'LOCATION' in result:
-                        interface = int(result['LOCATION'].split('.')[1])
-                        if interface < 10:
-                            result['ID_USB_INTERFACE_NUM'] = '0{}'.format(
-                                interface)
-                        else:
-                            result['ID_USB_INTERFACE_NUM'] = '{}'.format(
-                                interface)
-                    win_vid_pid = '*VID_{}*PID_{}*'.format(result['vid'],
-                                                           result['pid'])
-                    command = (
-                            'powershell gwmi "Win32_USBControllerDevice |' +
-                            ' %{[wmi]($_.Dependent)} |' +
-                            ' Where-Object -Property PNPDeviceID -Like "' +
-                            win_vid_pid + '" |' +
-                            ' Where-Object -Property Service -Eq "usbccgp" |' +
-                            ' Select-Object -Property PNPDeviceID"')
-                    res = subprocess.check_output(command, shell=True)
-                    r = res.decode('ascii')
-                    m = re.search('USB\\\\.*', r)
-                    if m:
-                        result['ID_SERIAL_SHORT'] = (
-                            m.group().strip().split('\\')[2])
-            return result
-
-    def port_exists(self, port):
-        """Check if a serial port exists in the computer by the port name.
-
-        Args:
-            port: String object with the port name. i.e. '/dev/ttyACM1'
-
-        Returns:
-            True if it was found, False if not.
-        """
-        exists = port in self.get_serial_ports()
-        return exists
-
-
-class LogSerial(object):
-    def __init__(self,
-                 port,
-                 baudrate,
-                 bytesize=8,
-                 parity='N',
-                 stopbits=1,
-                 timeout=0.15,
-                 retries=0,
-                 flush_output=True,
-                 terminator='\n',
-                 output_path=None,
-                 serial_logger=None):
-        global RETRIES
-        self.set_log = False
-        self.output_path = None
-        self.set_output_path(output_path)
-        if serial_logger:
-            self.set_logger(serial_logger)
-        self.monitor_port = PortCheck()
-        if self.monitor_port.port_exists(port=port):
-            self.connection_handle = serial.Serial()
-            RETRIES = retries
-            self.reading = True
-            self.log = []
-            self.log_thread = Thread()
-            self.command_ini_index = None
-            self.is_logging = False
-            self.flush_output = flush_output
-            self.terminator = terminator
-            if port:
-                self.connection_handle.port = port
-            if baudrate:
-                self.connection_handle.baudrate = baudrate
-            if bytesize:
-                self.connection_handle.bytesize = bytesize
-            if parity:
-                self.connection_handle.parity = parity
-            if stopbits:
-                self.connection_handle.stopbits = stopbits
-            if timeout:
-                self.connection_handle.timeout = timeout
-            try:
-                self.open()
-            except Exception as e:
-                self.close()
-                logging.error(e)
-        else:
-            raise LogSerialException(
-                'The port {} does not exist'.format(port))
-
-    def set_logger(self, serial_logger):
-        global logging
-        logging = serial_logger
-        self.set_output_path(getattr(logging, 'output_path', '/tmp'))
-        self.set_log = True
-
-    def set_output_path(self, output_path):
-        """Set the output path for the flushed log.
-
-        Args:
-            output_path: String object with the path
-        """
-        if output_path:
-            if os.path.exists(output_path):
-                self.output_path = output_path
-            else:
-                raise LogSerialException('The output path does not exist.')
-
-    def refresh_port_connection(self, port):
-        """Will update the port connection without closing the read thread.
-
-        Args:
-            port: String object with the new port name. i.e. '/dev/ttyACM1'
-
-        Raises:
-            LogSerialException if the port is not alive.
-        """
-        if self.monitor_port.port_exists(port=port):
-            self.connection_handle.port = port
-            self.open()
-        else:
-            raise LogSerialException(
-                'The port {} does not exist'.format(port))
-
-    def is_port_alive(self):
-        """Verify if the current port is alive in the computer.
-
-        Returns:
-            True if its alive, False if its missing.
-        """
-        alive = self.monitor_port.port_exists(port=self.connection_handle.port)
-        return alive
-
-    def open(self):
-        """Will open the connection with the current port settings."""
-        while self.connection_handle.isOpen():
-            self.connection_handle.close()
-            time.sleep(0.5)
-        self.connection_handle.open()
-        if self.flush_output:
-            self.flush()
-        self.start_reading()
-        logging.info('Connection Open')
-
-    def close(self):
-        """Will close the connection and the read thread."""
-        self.stop_reading()
-        if self.connection_handle:
-            self.connection_handle.close()
-        if not self.set_log:
-            logging.flush_log()
-        self.flush_log()
-        logging.info('Connection Closed')
-
-    def flush(self):
-        """Will flush any input from the serial connection."""
-        self.write('\n')
-        self.connection_handle.flushInput()
-        self.connection_handle.flush()
-        flushed = 0
-        while True:
-            ready_r, _, ready_x = (select.select([self.connection_handle], [],
-                                                 [self.connection_handle], 0))
-            if ready_x:
-                logging.exception('exception from serial port')
-                return
-            elif ready_r:
-                flushed += 1
-                # This may cause underlying buffering.
-                self.connection_handle.read(1)
-                # Flush the underlying buffer too.
-                self.connection_handle.flush()
-            else:
-                break
-            if flushed > 0:
-                logging.debug('dropped >{} bytes'.format(flushed))
-
-    def write(self, command, wait_time=0.2):
-        """Will write into the serial connection.
-
-        Args:
-            command: String object with the text to write.
-            wait_time: Float object with the seconds to wait after the
-                       command was issued.
-        """
-        if command:
-            if self.terminator:
-                command += self.terminator
-            self.command_ini_index = len(self.log)
-            self.connection_handle.write(command.encode())
-            if wait_time:
-                time.sleep(wait_time)
-            logging.info('cmd [{}] sent.'.format(command.strip()))
-
-    def flush_log(self):
-        """Will output the log into a CSV file."""
-        if len(self.log) > 0:
-            path = ''
-            if not self.output_path:
-                self.output_path = os.getcwd()
-            elif not os.path.exists(self.output_path):
-                self.output_path = os.getcwd()
-            path = os.path.join(self.output_path,
-                                str(uuid.uuid4()) + '_serial.log')
-            with open(path, 'a') as log_file:
-                for info in self.log:
-                    log_file.write('{}, {}\n'.format(info[0], info[1]))
-
-    def read(self):
-        """Will read from the log the output from the serial connection
-        after a write command was issued. It will take the initial time
-        of the command as a reference.
-
-        Returns:
-            Array object with the log lines.
-        """
-        buf_read = []
-        command_end_index = len(self.log)
-        info = self.query_serial_log(self.command_ini_index, command_end_index)
-        for line in info:
-            buf_read.append(line[1])
-        self.command_ini_index = command_end_index
-        return buf_read
-
-    def get_all_log(self):
-        """Gets the log object that collects the logs.
-
-        Returns:
-            DataFrame object with all the logs.
-        """
-        return self.log
-
-    def query_serial_log(self, from_index, to_index):
-        """Will query the session log from a given time in EPOC format.
-
-        Args:
-            from_timestamp: Double value with the EPOC timestamp to start
-                            the search.
-            to_timestamp: Double value with the EPOC timestamp to finish the
-                          rearch.
-
-        Returns:
-            DataFrame with the result query.
-        """
-        if from_index < to_index:
-            info = self.log[from_index:to_index]
-            return info
-
-    def _start_reading_thread(self):
-        if self.connection_handle.isOpen():
-            self.reading = True
-            while self.reading:
-                try:
-                    data = self.connection_handle.readline().decode('utf-8')
-                    if data:
-                        self.is_logging = True
-                        data.replace('/n', '')
-                        data.replace('/r', '')
-                        data = data.strip()
-                        self.log.append([time.time(), data])
-                    else:
-                        self.is_logging = False
-                except Exception:
-                    time.sleep(1)
-            logging.info('Read thread closed')
-
-    def start_reading(self):
-        """Method to start the log collection."""
-        if not self.log_thread.isAlive():
-            self.log_thread = Thread(target=self._start_reading_thread, args=())
-            self.log_thread.daemon = True
-            try:
-                self.log_thread.start()
-            except(KeyboardInterrupt, SystemExit):
-                self.close()
-        else:
-            logging.warning('Not running log thread, is already alive')
-
-    def stop_reading(self):
-        """Method to stop the log collection."""
-        self.reading = False
-        self.log_thread.join(timeout=600)
diff --git a/src/antlion/controllers/buds_lib/tako_trace_logger.py b/src/antlion/controllers/buds_lib/tako_trace_logger.py
deleted file mode 100644
index 187cfdc..0000000
--- a/src/antlion/controllers/buds_lib/tako_trace_logger.py
+++ /dev/null
@@ -1,55 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import logging
-
-from antlion import tracelogger
-
-
-class TakoTraceLogger(tracelogger.TraceLogger):
-    def __init__(self, *args, **kwargs):
-        super().__init__(*args, **kwargs)
-        self.d = self.debug
-        self.e = self.error
-        self.i = self.info
-        self.t = self.step
-        self.w = self.warning
-
-    def _logger_level(self, level_name):
-        level = logging.getLevelName(level_name)
-        return lambda *args, **kwargs: self._logger.log(level, *args, **kwargs)
-
-    def step(self, msg, *args, **kwargs):
-        """Delegate a step call to the underlying logger."""
-        self._log_with(self._logger_level('STEP'), 1, msg, *args, **kwargs)
-
-    def device(self, msg, *args, **kwargs):
-        """Delegate a device call to the underlying logger."""
-        self._log_with(self._logger_level('DEVICE'), 1, msg, *args, **kwargs)
-
-    def suite(self, msg, *args, **kwargs):
-        """Delegate a device call to the underlying logger."""
-        self._log_with(self._logger_level('SUITE'), 1, msg, *args, **kwargs)
-
-    def case(self, msg, *args, **kwargs):
-        """Delegate a case call to the underlying logger."""
-        self._log_with(self._logger_level('CASE'), 1, msg, *args, **kwargs)
-
-    def flush_log(self):
-        """This function exists for compatibility with Tako's logserial module.
-
-        Note that flushing the log is handled automatically by python's logging
-        module.
-        """
diff --git a/src/antlion/controllers/buds_lib/test_actions/__init__.py b/src/antlion/controllers/buds_lib/test_actions/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/controllers/buds_lib/test_actions/__init__.py
+++ /dev/null
diff --git a/src/antlion/controllers/buds_lib/test_actions/agsa_acts.py b/src/antlion/controllers/buds_lib/test_actions/agsa_acts.py
deleted file mode 100644
index 3d03cbd..0000000
--- a/src/antlion/controllers/buds_lib/test_actions/agsa_acts.py
+++ /dev/null
@@ -1,153 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import datetime
-import re
-
-from antlion.controllers.adb_lib.error import AdbError
-from antlion.controllers.buds_lib.test_actions.base_test_actions import BaseTestAction
-from antlion.controllers.buds_lib.test_actions.base_test_actions import timed_action
-
-PHONE_DFU_PATH = ('/storage/emulated/0/Android/data/com.google.android'
-                  '.googlequicksearchbox/files/download_cache/apollo.dfu')
-
-AGSA_BROADCAST = (
-    'am  broadcast -a \'action_ota\' --es dfu_url %s --es build_label 9.9.9 '
-    '--ez is_force %s com.google.android.googlequicksearchbox/com.google'
-    '.android.apps.gsa.broadcastreceiver.CommonBroadcastReceiver')
-
-
-class AgsaOTAError(Exception):
-    """OTA Error"""
-
-
-class AgsaTestActions(BaseTestAction):
-    """AGSA test action library."""
-
-    def __init__(self, android_dev, logger=None):
-        """
-        Simple init code to keep the android object for future reference.
-        Args:
-           android_dev: devcontrollers.android_device.AndroidDevice
-        """
-        super(AgsaTestActions, self).__init__(logger)
-        self.dut = android_dev
-
-    @timed_action
-    def _initiate_agsa_ota(self, file_path, destination=None, force=True):
-        """Pushes the dfu file to phone and issues broadcast to start AGSA OTA
-
-        Args:
-            file_path: (string) path of dfu file
-            destination: (string) destination path on the phone uses
-                         $PHONE_DFU_PATH if not specified
-            force: (bool) option to force the issued broadcast?
-        """
-        if not destination:
-            destination = PHONE_DFU_PATH
-        if self.dut.push_file_to_phone(file_path, destination):
-            if force:
-                force = 'true'
-            else:
-                force = 'false'
-
-            command = AGSA_BROADCAST % (destination, force)
-            output = self.dut.adb.shell(command.split())
-            if 'result=0' in output:
-                self.logger.info('Agsa broadcast successful!')
-                return True
-            else:
-                self.logger.error('Agsa broadcast failed')
-                return False
-
-    @timed_action
-    def _wait_for_ota_to_finish(self, timeout=660):
-        """Logcat is continuously read to keep track of the OTA transfer
-
-        Args:
-           timeout: (int) time to wait before timing out.
-
-        Returns:
-            True on success
-
-        Raises: AgsaOTAError if the timeout is reached.
-        """
-        # regex that confirms completion
-        transfer_completion_match = \
-            re.compile('OTA progress: 100 %|OTA img rcvd')
-        # time now + $timeout
-        expiry_time = datetime.datetime.now() + \
-                      datetime.timedelta(seconds=timeout)
-        self.logger.info('Waiting for OTA transfer to complete....')
-        while True:
-            # time now - 1 minute: to be used in getting logs from a minute back
-            now_plus_minute = datetime.datetime.now() - \
-                              datetime.timedelta(seconds=5)
-            try:
-                # grep logcat for 'DeviceLog'
-                filtered_log = self.dut.logcat_filter_message(
-                    now_plus_minute.strftime('%m-%d %H:%M:%S.000'),
-                    'Devicelog:')
-                if filtered_log and \
-                        transfer_completion_match.search(filtered_log):
-                    self.logger.info('Transfer completed!')
-                    break
-            except AdbError:
-                # gets thrown if no matching string is found
-                pass
-            if datetime.datetime.now() > expiry_time:
-                self.logger.error('Timed out waiting for OTA to complete.')
-                raise AgsaOTAError('Timed out waiting for OTA to complete.')
-        return True
-
-    @timed_action
-    def initiate_agsa_and_wait_until_transfer(self, file_path, destination=None,
-                                              force=True, timeout=660):
-        """Calls _initiate_agsa_ota and _wait_for_ota_to_finish
-
-        Returns:
-            True on success and False otherwise
-        """
-        self._initiate_agsa_ota(file_path, destination, force)
-        return self._wait_for_ota_to_finish(timeout)
-
-    @timed_action
-    def install_agsa(self, version, force=False):
-        """
-        Installs the specified version of AGSA if different from the one
-        currently installed, unless force is set to True.
-
-        Args:
-            version: (string) ex: '7.14.21.release'
-            force: (bool) installs only if currently installed version is
-                   different than the one to be installed. True installs
-                   by-passing version check
-        Return:
-            True on Success and False otherwise
-        """
-        # get currently installed version, and install agsa only if different
-        # from what is requested
-        current_version = self.dut.get_agsa_version()
-        if (not (version.replace('alpha', '').replace('release', '')
-                 in current_version)) or force:
-            self.logger.info('Current AGSA version is %s' % current_version)
-            self.logger.info('Installing AGSA version %s...' % version)
-            if self.and_actions.install_agsa(version):
-                self.logger.info('Install success!')
-                return True
-            else:
-                self.logger.error('Failed to install version %s' % version)
-                return False
diff --git a/src/antlion/controllers/buds_lib/test_actions/apollo_acts.py b/src/antlion/controllers/buds_lib/test_actions/apollo_acts.py
deleted file mode 100644
index ac2fa6d..0000000
--- a/src/antlion/controllers/buds_lib/test_actions/apollo_acts.py
+++ /dev/null
@@ -1,616 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-A comprehensive interface for performing test actions on an Apollo device.
-"""
-
-import time
-
-from antlion.controllers.android_lib.tel.tel_utils import initiate_call
-from antlion.controllers.android_lib.tel.tel_utils import wait_for_droid_in_call
-from antlion.controllers.buds_lib.apollo_lib import DeviceError
-from antlion.controllers.buds_lib.test_actions.agsa_acts import AgsaOTAError
-from antlion.controllers.buds_lib.test_actions.base_test_actions import BaseTestAction
-from antlion.controllers.buds_lib.test_actions.base_test_actions import timed_action
-from antlion.controllers.buds_lib.test_actions.bt_utils import BTUtils
-from antlion.libs.utils.timer import TimeRecorder
-from antlion.utils import wait_until
-
-PACKAGE_NAME_AGSA = 'com.google.android.googlequicksearchbox'
-PACKAGE_NAME_GMS = 'com.google.android.gms'
-PACKAGE_NAME_NEARBY = 'com.google.android.gms.policy_nearby'
-PACKAGE_NAME_SETTINGS = 'com.android.settings'
-BISTO_MP_DETECT_HEADER = 'Pixel Buds'
-BISTO_MP_DEVICE_TEXT = 'Pixel Buds'
-BISTO_MP_DETECT_TEXT = BISTO_MP_DETECT_HEADER + BISTO_MP_DEVICE_TEXT
-BISTO_MP_CANCEL_TEXT = 'CANCEL'
-BISTO_MP_CONNECT_TEXT = 'TAP TO CONNECT'
-BISTO_MP_CONNECT_FAIL_TEXT = 'Can\'t connect to'
-BISTO_MP_CONNECT_RETRY_TEXT = 'TRY AGAIN'
-BISTO_MP_CONNECTED_TEXT = 'Now set up your Google Assistant'
-BISTO_MP_CONNECTED_EXIT_TEXT = 'NO THANKS'
-BISTO_MP_EXIT_PROMPT_TEXT = 'Exit setup?'
-BISTO_MP_EXIT_CONFIRM_TEXT = 'EXIT'
-PROFILES_CONNECTED = {
-    'HFP(pri.)': 'TRUE',
-    'A2DP(pri)': 'TRUE',
-}
-PROFILES_DISCONNECTED = {
-    'HFP(pri.)': 'FALSE',
-    'A2DP(pri)': 'FALSE',
-}
-COMP_PROFILE_CONNECTED = {'Comp': 'TRUE'}
-COMP_PROFILE_DISCONNECTED = {'Comp': 'FALSE'}
-AVRCPSTATUS = 'AvrcpPlayPause'
-DEFAULT_TIMEOUT = 60  # wait 60 seconds max for bond/connect.
-DEFAULT_CMD_INTERVAL = 0.5  # default interval between serial commands
-DEFAULT_CMD_RETRY = 5  # default retry times when a command failed.
-DEFAULT_BT_PROFILES = [
-    'HFP Pri', 'HFP Sec', 'A2DP Pri', 'A2DP Sec', 'CTRL', 'AUDIO', 'DEBUG',
-    'TRANS'
-]
-DEFAULT_BT_STATUS = ['A2DP(pri)', 'HFP(pri.)', 'Comp']
-
-
-class TestActsError(Exception):
-    """Exception from Apollo Acts Error."""
-
-
-class ApolloTestActions(BaseTestAction):
-    """Test action class for all Apollo test actions."""
-
-    def __init__(self, apollo_dev, logger=None):
-        """
-        Args:
-             apollo_dev: apollo.lib.apollo_lib.Device the Apollo device
-        """
-        super(ApolloTestActions, self).__init__(logger)
-        self.dut = apollo_dev
-        # need a embedded timer for connection time measurements.
-        self.measurement_timer = TimeRecorder()
-
-    def bluetooth_get_status(self):
-        status = self.dut.get_bt_status()
-        self.logger.info(status)
-
-    def wait_for_bluetooth_disconnection(self, timeout=60):
-        """ Set pairing mode and disconnect.
-
-        This action will wait until the apollo profiles are false.
-
-        Args:
-             timeout: integer, timeout value in seconds.
-        """
-        result = True
-        apollo_status = self.dut.get_bt_status()
-        self.logger.info('Waiting for the disconnection.')
-        time.sleep(1)
-        ini_time = time.time()
-        while len(apollo_status) != len(
-            [s for s in apollo_status.values() if s == 'FALSE']):
-            apollo_status = self.dut.get_bt_status()
-            if (time.time() - ini_time) > timeout:
-                self.logger.warning('Timeout waiting for the disconnection.')
-                return False
-            time.sleep(1)
-        return result
-
-    def pair(self, phone, companion_app=True):
-        """Pairs phone with apollo and validates bluetooth profiles.
-
-        Args:
-            phone: android phone
-            apollo: apollo device
-            companion_app (optional): True if the phone has a companion app
-                                      installed. False otherwise.
-
-        Raises:
-            TestActsError: Bluetooth pairing failed/ Dut BT status check failed.
-        """
-        bt_util = BTUtils()
-        target_addr = self.dut.bluetooth_address
-        if bt_util.android_device_in_connected_state(phone, target_addr):
-            self.logger.info('Already paired and connected, skipping pairing.')
-        else:
-            if bt_util.android_device_in_paired_state(phone, target_addr):
-                self.logger.info(
-                    'Device is paired but not connected, unpair first.')
-                if not bt_util.bt_unpair(phone, self.dut):
-                    raise TestActsError('Unable to unpair the device')
-            bt_util.bt_pair_and_connect(phone, self.dut)
-            self.logger.info('DEVICE PAIRED')
-            if companion_app:
-                profiles = PROFILES_CONNECTED.copy()
-                profiles.update(COMP_PROFILE_CONNECTED)
-            else:
-                profiles = PROFILES_CONNECTED
-            self.logger.info(profiles)
-            if not bt_util.check_device_bt(device=self.dut, profiles=profiles):
-                raise TestActsError('Dut BT status check failed.')
-            else:
-                return True
-
-    def unpair(self, phone, companion_app=True, factory_reset_dut=True):
-        """Unpairs phone from apollo and validates bluetooth profiles.
-
-        Args:
-            phone: android phone
-            apollo: apollo device
-            companion_app (optional): True if the phone has a companion app
-                                      installed. False otherwise.
-
-        Raises:
-            TestActsError: Bluetooth unpairing/Dut BT status check failed.
-        """
-        bt_util = BTUtils()
-        target_addr = self.dut.bluetooth_address
-        if not bt_util.android_device_in_paired_state(phone, target_addr):
-            self.logger.info('Device is already unpaired, skipping unpairing.')
-        else:
-            result = bt_util.bt_unpair(
-                phone, self.dut, factory_reset_dut=factory_reset_dut)
-            if not result:
-                raise TestActsError('Bluetooth unpairing failed.')
-            if companion_app:
-                profiles = PROFILES_DISCONNECTED.copy()
-                profiles.update(COMP_PROFILE_DISCONNECTED)
-            else:
-                profiles = PROFILES_DISCONNECTED
-            if not bt_util.check_device_bt(device=self.dut, profiles=profiles):
-                raise TestActsError('Dut BT status check failed.')
-            else:
-                return True
-
-    def is_paired(self, phone):
-        """Check if the given apollo is paired with the android device.
-
-        Args:
-            phone: android phone
-            apollo: apollo device
-
-        Returns:
-            Bool: True if apollo is paired with the phone.
-        """
-        bt_util = BTUtils()
-        target_addr = self.dut.bluetooth_address
-        return bt_util.android_device_in_paired_state(phone, target_addr)
-
-    def send_music_play_event_and_validate(self):
-        """Send the play event on Apollo and validate the response and DSP
-        Status.
-
-        Raises:
-            TestActsError: Error while playing the music.
-        """
-        play_detection_timeout = 1
-        if self.dut.is_streaming():
-            self.logger.info('Music already streaming. Skipping play event..')
-            return
-        self.logger.info('Playing video...')
-        is_played = self.dut.music_control_events(
-            AVRCPSTATUS, self.dut.apollo_log_regex.AVRCP_PLAY_REGEX)
-        if not is_played:
-            self.logger.error('AVRCP Played status not found')
-            raise TestActsError('AVRCP Played status not found.')
-        wait_until(
-            lambda: self.dut.is_streaming(),
-            play_detection_timeout,
-            sleep_s=0.25)
-        if not self.dut.is_streaming():
-            self.logger.error('Device is NOT in a deviceA2DPStreaming state')
-            raise TestActsError(
-                'Device is NOT in a deviceA2DPStreaming state.')
-
-    def send_music_pause_event_and_validate(self):
-        """Send the pause event on Apollo and validate the responses and DSP
-        Status.
-
-        Raises:
-            TestActsError: Error while pausing the music.
-        """
-        paused_detection_timeout = 10
-        if not self.dut.is_streaming():
-            self.logger.info('Music not streaming. Skipping pause event..')
-            return
-        self.logger.info("Pausing video...")
-        is_paused = self.dut.music_control_events(
-            AVRCPSTATUS, self.dut.apollo_log_regex.AVRCP_PAUSE_REGEX)
-        if not is_paused:
-            self.logger.error('AVRCP Paused statue not found')
-            raise TestActsError('AVRCP Paused status not found.')
-        wait_until(
-            lambda: not self.dut.is_streaming(),
-            paused_detection_timeout,
-            sleep_s=0.25)
-        if self.dut.is_streaming():
-            self.logger.error('Device is still in deviceA2DPStreaming state')
-            raise TestActsError(
-                'Device is still in deviceA2DPStreaming state.')
-
-    def vol_down_and_validate(self):
-        """Send volume down twice and validate by comparing two levels
-
-        Raises:
-            TestActsError: Error
-        """
-        self.logger.info('Decreasing volume')
-        before_vol = self.dut.volume('Down', 1)
-        time.sleep(2)
-        after_vol = self.dut.volume('Down', 1)
-        if not after_vol or not before_vol or after_vol >= before_vol:
-            self.logger.error(
-                'Unable to decrease the volume. Before: %s. After: %s' %
-                (before_vol, after_vol))
-            raise TestActsError('error decreasing volume')
-
-    def vol_up_and_validate(self):
-        """Send volume up twice and validate by comparing two levels
-
-        Raises:
-            TestActsError: Error
-        """
-        self.logger.info('Increasing volume')
-        before_vol = self.dut.volume('Up', 1)
-        time.sleep(2)
-        after_vol = self.dut.volume('Up', 1)
-        if not after_vol or not before_vol or after_vol <= before_vol:
-            self.logger.error(
-                'Unable to increase the volume. Before: %s. After: %s' %
-                (before_vol, after_vol))
-            raise TestActsError('error increasing volume')
-
-    def call_and_validate_ringing(self,
-                                  calling_phone,
-                                  number_to_call,
-                                  call_retries=10):
-        for i in range(call_retries):
-            initiate_call(self.logger, calling_phone, number_to_call)
-            is_calling = wait_for_droid_in_call(
-                self.logger, calling_phone, max_time=10)
-            if is_calling:
-                self.logger.info('Call initiated!')
-                break
-            else:
-                self.logger.warning('Call is not initiating.')
-                if i == call_retries:
-                    self.logger.error('Call initiation retries exhausted')
-                    raise TestActsError(
-                        '%s retries failed to initiate the call' %
-                        (call_retries))
-            self.logger.warning('Retrying call...')
-        # wait for offhook state and return
-        wait_until(
-            (lambda: calling_phone.droid.telecomGetCallState() == 'OFFHOOK'),
-            timeout_s=40,
-            condition=True,
-            sleep_s=.5)
-        self.logger.info('Phone call initiated on %s' % calling_phone.serial)
-
-    def answer_phone_and_validate_call_received(self, receiving_phone):
-        # wait until the phone rings (assumes that a call is initiated prior to
-        # running the command)
-        wait_until(
-            lambda: receiving_phone.droid.telecomGetCallState() == 'RINGING',
-            timeout_s=40,
-            condition=True,
-            sleep_s=.5)
-        self.logger.info('Ring detected on %s - now answering the call...' %
-                         (receiving_phone.serial))
-        # answer the phone call
-        self.dut.tap()
-        # wait until OFFHOOK state
-        wait_until(
-            lambda: receiving_phone.droid.telecomGetCallState() == 'OFFHOOK',
-            timeout_s=40,
-            condition=True,
-            sleep_s=.5)
-
-    def hangup_phone_and_validate_call_hung(self, receiving_phone):
-        # wait for phone to be in OFFHOOK state (assumed that a call is answered
-        # and engaged)
-        wait_until(
-            lambda: receiving_phone.droid.telecomGetCallState() == 'OFFHOOK',
-            timeout_s=40,
-            condition=True,
-            sleep_s=.5)
-        # end the call (post and pre 1663 have different way of ending call)
-        self.logger.info(
-            'Hanging up the call on %s...' % receiving_phone.serial)
-        if self.dut.version < 1663:
-            self.dut.tap()
-        else:
-            self.dut.hold(duration=100)
-        # wait for idle state
-        wait_until(
-            lambda: receiving_phone.droid.telecomGetCallState() == 'IDLE',
-            timeout_s=40,
-            condition=True,
-            sleep_s=.5)
-
-    @timed_action
-    def factory_reset(self):
-        ret = False
-        try:
-            self.dut.factory_reset()
-            ret = True
-        except DeviceError as ex:
-            self.logger.warning('Failed to reset Apollo: %s' % ex)
-        return ret
-
-    @timed_action
-    def wait_for_magic_pairing_notification(self, android_act, timeout=60):
-        dut_detected = False
-        start_time = time.time()
-        self.logger.info('Waiting for MP prompt: %s' % BISTO_MP_DEVICE_TEXT)
-        while not dut_detected:
-            android_act.dut.ui_util.uia.wait.update()
-            self.sleep(1)
-            if android_act.dut.ui_util.uia(
-                    textContains=BISTO_MP_DETECT_HEADER, enabled=True).exists:
-                if android_act.dut.ui_util.uia(
-                        textContains=BISTO_MP_DEVICE_TEXT,
-                        enabled=True).exists:
-                    self.logger.info('DUT Apollo MP prompt detected!')
-                    dut_detected = True
-                else:
-                    self.logger.info(
-                        'NONE DUT Apollo MP prompt detected! Cancel and RETRY!'
-                    )
-                    android_act.dut.ui_util.click_by_text(BISTO_MP_CANCEL_TEXT)
-            if time.time() - start_time > timeout:
-                break
-        if not dut_detected:
-            self.logger.info(
-                'Failed to get %s MP prompt' % BISTO_MP_DEVICE_TEXT)
-        return dut_detected
-
-    @timed_action
-    def start_magic_pairing(self, android_act, timeout=30, retries=3):
-        paired = False
-        android_act.dut.ui_util.click_by_text(
-            BISTO_MP_CONNECT_TEXT, timeout=timeout)
-        connect_start_time = time.time()
-        count = 0
-        timeout = 30
-
-        while not paired and count < retries:
-            android_act.dut.ui_util.uia.wait.update()
-            self.sleep(1)
-            if time.time() - connect_start_time > timeout:
-                self.logger.info('Time out! %s seconds' % time)
-                android_act.app_force_close_agsa()
-                self.logger.info('Timeout(s): %s' % timeout)
-                break
-            if android_act.dut.ui_util.uia(
-                    textContains=BISTO_MP_CONNECT_FAIL_TEXT,
-                    enabled=True).exists:
-                count += 1
-                self.logger.info('MP FAILED! Retry %s.' % count)
-                android_act.dut.ui_util.click_by_text(
-                    BISTO_MP_CONNECT_RETRY_TEXT)
-                connect_start_time = time.time()
-            elif android_act.dut.ui_util.uia(
-                    textContains=BISTO_MP_CONNECTED_TEXT, enabled=True).exists:
-                self.logger.info('MP SUCCESSFUL! Exiting AGSA...')
-                paired = True
-                android_act.dut.ui_util.click_by_text(
-                    BISTO_MP_CONNECTED_EXIT_TEXT)
-                android_act.dut.ui_util.wait_for_text(
-                    BISTO_MP_EXIT_PROMPT_TEXT)
-                android_act.dut.ui_util.click_by_text(
-                    BISTO_MP_EXIT_CONFIRM_TEXT)
-        return paired
-
-    @timed_action
-    def turn_bluetooth_on(self):
-        self.dut.cmd('pow 1')
-        return True
-
-    @timed_action
-    def turn_bluetooth_off(self):
-        self.dut.cmd('pow 0')
-        return True
-
-    @timed_action
-    def wait_for_bluetooth_a2dp_hfp(self,
-                                    timeout=DEFAULT_TIMEOUT,
-                                    interval=DEFAULT_CMD_INTERVAL):
-        """Wait for BT connection by checking if A2DP and HFP connected.
-
-        This is used for BT pair+connect test.
-
-        Args:
-            timeout: float, timeout value in second.
-            interval: float, float, interval between polling BT profiles.
-            timer: TimeRecorder, time recorder to save the connection time.
-        """
-        # Need to check these two profiles
-        pass_profiles = ['A2DP Pri', 'HFP Pri']
-        # TODO(b/122730302): Change to just raise an error
-        ret = False
-        try:
-            ret = self._wait_for_bluetooth_profile_connection(
-                pass_profiles, timeout, interval, self.measurement_timer)
-        except DeviceError as ex:
-            self.logger.warning('Failed to wait for BT connection: %s' % ex)
-        return ret
-
-    def _wait_for_bluetooth_profile_connection(self, profiles_to_check,
-                                               timeout, interval, timer):
-        """A generic method to wait for specified BT profile connection.
-
-        Args:
-            profiles_to_check: list, profile names (A2DP, HFP, etc.) to be
-                               checked.
-            timeout: float, timeout value in second.
-            interval: float, interval between polling BT profiles.
-            timer: TimeRecorder, time recorder to save the connection time.
-
-        Returns:
-            bool, True if checked profiles are connected, False otherwise.
-        """
-        timer.start_timer(profiles_to_check, force=True)
-        start_time = time.time()
-        while time.time() - start_time < timeout:
-            profiles = self._bluetooth_check_profile_connection()
-            for profile in profiles:
-                if profiles[profile]:
-                    timer.stop_timer(profile)
-            # now check if the specified profile connected.
-            all_connected = True
-            for profile in profiles_to_check:
-                if not profiles[profile]:
-                    all_connected = False
-                    break
-            if all_connected:
-                return True
-            time.sleep(interval)
-        # make sure the profile timer are stopped.
-        timer.stop_timer(profiles_to_check)
-        return False
-
-    def _bluetooth_check_profile_connection(self):
-        """Return profile connection in a boolean dict.
-
-        key=<profile name>, val = T/F
-        """
-        profiles = dict()
-        output = self.dut.get_conn_devices()
-        # need to strip all whitespaces.
-        conn_devs = {}
-
-        for key in output:
-            conn_devs[key.strip()] = output[key].strip()
-        for key in conn_devs:
-            self.logger.info('%s:%s' % (key, conn_devs[key]))
-            if 'XXXXXXXX' in conn_devs[key]:
-                profiles[key] = conn_devs[key]
-            else:
-                profiles[key] = False
-        return profiles
-
-    @timed_action
-    def wait_for_bluetooth_status_connection_all(
-            self, timeout=DEFAULT_TIMEOUT, interval=DEFAULT_CMD_INTERVAL):
-        """Wait for BT connection by checking if A2DP, HFP and COMP connected.
-
-        This is used for BT reconnect test.
-
-        Args:
-            timeout: float, timeout value in second.
-            interval: float, float, interval between polling BT profiles.
-        """
-        ret = False
-        self.measurement_timer.start_timer(DEFAULT_BT_STATUS, force=True)
-        # All profile not connected by default.
-        connected_status = {key: False for key in DEFAULT_BT_STATUS}
-        start_time = time.time()
-        while time.time() < start_time + timeout:
-            try:
-                time.sleep(interval)
-                status = self.dut.get_bt_status()
-                for key in DEFAULT_BT_STATUS:
-                    if (not connected_status[key] and key in status
-                            and 'TRUE' == status[key]):
-                        self.measurement_timer.stop_timer(key)
-                        connected_status[key] = True
-                        self.logger.info(
-                            'BT status %s connected at %fs.' %
-                            (key, self.measurement_timer.elapsed(key)))
-                if False not in connected_status.values():
-                    ret = True
-                    break
-            except DeviceError as ex:
-                self.logger.warning(
-                    'Device exception when waiting for reconnection: %s' % ex)
-        self.measurement_timer.stop_timer(DEFAULT_BT_STATUS)
-        return ret
-
-    def initiate_ota_via_agsa_verify_transfer_completion_in_logcat(
-            self,
-            agsa_action,
-            dfu_path,
-            destination=None,
-            force=True,
-            apply_image=True,
-            reconnect=True):
-        """
-        Starts an OTA by issuing an intent to AGSA after copying the dfu file to
-        the appropriate location on the phone
-
-        Args:
-            agsa_action: projects.agsa.lib.test_actions.agsa_acts
-                         .AgsaTestActions
-            dfu_path: string - absolute path of dfu file
-            destination: string - absolute path of file on phone if not
-                         specified will use
-                         /storage/emulated/0/Android/data/com.google.android
-                         .googlequicksearchbox/files/download_cache/apollo.dfu
-            force: value set in the intent sent to AGSA
-            True if success False otherwise
-        """
-        try:
-            agsa_action.initiate_agsa_and_wait_until_transfer(
-                dfu_path, destination=destination, force=force)
-            if apply_image:
-                # set in case
-                self.dut.set_in_case(reconnect=reconnect)
-        except AgsaOTAError as ex:
-            self.logger.error('Failed to OTA via AGSA %s' % ex)
-            return False
-        except DeviceError as ex:
-            self.logger.error('Failed to bring up device %s' % ex)
-            return False
-        return True
-
-    @timed_action
-    def wait_for_bluetooth_a2dp_hfp_rfcomm_connect(
-            self, address, timeout=DEFAULT_TIMEOUT,
-            interval=DEFAULT_CMD_INTERVAL):
-        """Wait for BT reconnection by checking if A2DP, HFP and COMP connected
-        to the specified address.
-
-        This is used for BT connection switch test.
-
-        Args:
-            address: str, MAC of the address to connect.
-            timeout: float, timeout value in second.
-            interval: float, float, interval between polling BT profiles.
-
-        Returns:
-            True if the specified address is connected. False otherwise.
-        """
-        last_4_hex = address.replace(':', '')[-4:].lower()
-        profiles_to_check = ['HFP Pri', 'A2DP Pri', 'CTRL', 'AUDIO']
-        self.measurement_timer.start_timer(profiles_to_check, force=True)
-        end_time = time.time() + timeout
-        all_connected = True
-        while time.time() < end_time:
-            all_connected = True
-            profiles = self._bluetooth_check_profile_connection()
-            for profile in profiles_to_check:
-                if (profile in profiles and profiles[profile]
-                        and last_4_hex in profiles[profile].lower()):
-                    self.measurement_timer.stop_timer(profile)
-                else:
-                    all_connected = False
-            if all_connected:
-                break
-            time.sleep(interval)
-        # make sure the profile timer are stopped.
-        self.measurement_timer.stop_timer(profiles_to_check)
-
-        return all_connected
diff --git a/src/antlion/controllers/buds_lib/test_actions/audio_utils.py b/src/antlion/controllers/buds_lib/test_actions/audio_utils.py
deleted file mode 100644
index fdc4bfa..0000000
--- a/src/antlion/controllers/buds_lib/test_actions/audio_utils.py
+++ /dev/null
@@ -1,99 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""A generic library for audio related test actions"""
-
-import datetime
-import time
-
-from antlion import utils
-from antlion.controllers.buds_lib import tako_trace_logger
-
-
-class AudioUtilsError(Exception):
-    """Generic AudioUtils Error."""
-
-
-class AudioUtils(object):
-    """A utility that manages generic audio interactions and actions on one or
-    more devices under test.
-
-    To be maintained such that it is compatible with any devices that pair with
-    phone.
-    """
-
-    def __init__(self):
-        self.logger = tako_trace_logger.TakoTraceLogger()
-
-    def play_audio_into_device(self, audio_file_path, audio_player, dut):
-        """Open mic on DUT, play audio into DUT, close mic on DUT.
-
-        Args:
-            audio_file_path: the path to the audio file to play, relative to the
-                           audio_player
-            audio_player: the device from which to play the audio file
-            dut: the device with the microphone
-
-        Returns:
-            bool: result of opening and closing DUT mic
-        """
-
-        if not dut.open_mic():
-            self.logger.error('DUT open_mic did not return True')
-            return False
-        audio_player.play(audio_file_path)
-        if not dut.close_mic():
-            self.logger.error('DUT close_mic did not return True.')
-            return False
-        return True
-
-    def get_agsa_interpretation_of_audio_file(self, audio_file_path,
-                                              target_interpretation,
-                                              audio_player, dut,
-                                              android_device):
-        """Gets AGSA interpretation from playing audio into DUT.
-
-        **IMPORTANT**: AGSA on android device must be connected to DUT and able
-        to receive info from DUT mic.
-
-        Args:
-          audio_file_path: the path to the audio file to play, relative to the
-                           audio_player
-          target_interpretation: what agsa interpretation should be
-          audio_player: the device from which to play the audio file
-          dut: the device with the microphone
-          android_device: android device to which dut is connected
-
-        Returns:
-          interpretation: agsa interpretation of audio file
-          score: similarity score between interpretation and target
-                 interpretation
-        """
-
-        play_start_time = datetime.datetime.now()
-        interpretation, score = '', 0.0
-        if self.play_audio_into_device(audio_file_path=audio_file_path,
-                                       audio_player=audio_player,
-                                       dut=dut):
-            time.sleep(1)
-            interpretation = android_device.agsa_interpretation(
-                cutoff_time=play_start_time,
-                target_interpretation=target_interpretation,
-                source='bisto')
-            score = utils.string_similarity(target_interpretation,
-                                            interpretation)
-
-        return interpretation, score
diff --git a/src/antlion/controllers/buds_lib/test_actions/base_test_actions.py b/src/antlion/controllers/buds_lib/test_actions/base_test_actions.py
deleted file mode 100644
index 518f9c6..0000000
--- a/src/antlion/controllers/buds_lib/test_actions/base_test_actions.py
+++ /dev/null
@@ -1,194 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Base test action class, provide a base class for representing a collection of
-test actions.
-"""
-
-import datetime
-import inspect
-import time
-
-from antlion.controllers.buds_lib import tako_trace_logger
-from antlion.libs.utils.timer import TimeRecorder
-
-# All methods start with "_" are considered hidden.
-DEFAULT_HIDDEN_ACTION_PREFIX = '_'
-
-
-def timed_action(method):
-    """A common decorator for test actions."""
-
-    def timed(self, *args, **kw):
-        """Log the enter/exit/time of the action method."""
-        func_name = self._convert_default_action_name(method.__name__)
-        if not func_name:
-            func_name = method.__name__
-        self.log_step('%s...' % func_name)
-        self.timer.start_timer(func_name, True)
-        result = method(self, *args, **kw)
-        # TODO: Method run time collected can be used for automatic KPI checks
-        self.timer.stop_timer(func_name)
-        return result
-
-    return timed
-
-
-class TestActionNotFoundError(Exception):
-    pass
-
-
-class BaseTestAction(object):
-    """Class for organizing a collection of test actions.
-
-    Test actions are just normal python methods, and should perform a specified
-    action. @timed_action decorator can log the entry/exit of the test action,
-    and the execution time.
-
-    The BaseTestAction class also provides a mapping between human friendly
-    names and test action methods in order to support configuration base
-    execution. By default, all methods not hidden (not start with "_") is
-    exported as human friendly name by replacing "_" with space.
-
-    Test action method can be called directly, or via
-    _perform_action(<human friendly name>, <args...>)
-    method.
-    """
-
-    @classmethod
-    def _fill_default_action_map(cls):
-        """Parse current class and get all test actions methods."""
-        # a <human readable name>:<method name> map.
-        cls._action_map = dict()
-        for name, _ in inspect.getmembers(cls, inspect.ismethod):
-            act_name = cls._convert_default_action_name(name)
-            if act_name:
-                cls._action_map[act_name] = name
-
-    @classmethod
-    def _convert_default_action_name(cls, func_name):
-        """Default conversion between method name -> human readable action name.
-        """
-        if not func_name.startswith(DEFAULT_HIDDEN_ACTION_PREFIX):
-            act_name = func_name.lower()
-            act_name = act_name.replace('_', ' ')
-            act_name = act_name.title()
-            return act_name.strip()
-        else:
-            return ''
-
-    @classmethod
-    def _add_action_alias(cls, default_act_name, alias):
-        """Add an alias to an existing test action."""
-        if default_act_name in cls._action_map:
-            cls._action_map[alias] = cls._action_map[default_act_name]
-            return True
-        else:
-            return False
-
-    @classmethod
-    def _get_action_names(cls):
-        if not hasattr(cls, '_action_map'):
-            cls._fill_default_action_map()
-        return cls._action_map.keys()
-
-    @classmethod
-    def get_current_time_logcat_format(cls):
-        return datetime.datetime.now().strftime('%m-%d %H:%M:%S.000')
-
-    @classmethod
-    def _action_exists(cls, action_name):
-        """Verify if an human friendly action name exists or not."""
-        if not hasattr(cls, '_action_map'):
-            cls._fill_default_action_map()
-        return action_name in cls._action_map
-
-    @classmethod
-    def _validate_actions(cls, action_list):
-        """Verify if an human friendly action name exists or not.
-
-        Args:
-          :param action_list: list of actions to be validated.
-
-        Returns:
-          tuple of (is valid, list of invalid/non-existent actions)
-        """
-        not_found = []
-        for action_name in action_list:
-            if not cls._action_exists(action_name):
-                not_found.append(action_name)
-        all_valid = False if not_found else True
-        return all_valid, not_found
-
-    def __init__(self, logger=None):
-        if logger is None:
-            self.logger = tako_trace_logger.TakoTraceLogger()
-            self.log_step = self.logger.step
-        else:
-            self.logger = logger
-            self.log_step = self.logger.info
-        self.timer = TimeRecorder()
-        self._fill_default_action_map()
-
-    def __enter__(self):
-        return self
-
-    def __exit__(self, *args):
-        pass
-
-    def _perform_action(self, action_name, *args, **kwargs):
-        """Perform the specified human readable action."""
-        if action_name not in self._action_map:
-            raise TestActionNotFoundError('Action %s not found this class.'
-                                          % action_name)
-
-        method = self._action_map[action_name]
-        ret = getattr(self, method)(*args, **kwargs)
-        return ret
-
-    @timed_action
-    def print_actions(self):
-        """Example action methods.
-
-        All test action method must:
-            1. return a value. False means action failed, any other value means
-               pass.
-            2. should not start with "_". Methods start with "_" is hidden.
-        All test action method may:
-            1. have optional arguments. Mutable argument can be used to pass
-               value
-            2. raise exceptions. Test case class is expected to handle
-               exceptions
-        """
-        num_acts = len(self._action_map)
-
-        self.logger.info('I can do %d action%s:' %
-                      (num_acts, 's' if num_acts != 1 else ''))
-        for act in self._action_map.keys():
-            self.logger.info(' - %s' % act)
-        return True
-
-    @timed_action
-    def sleep(self, seconds):
-        self.logger.info('%s seconds' % seconds)
-        time.sleep(seconds)
-
-
-if __name__ == '__main__':
-    acts = BaseTestAction()
-    acts.print_actions()
-    acts._perform_action('print actions')
-    print(acts._get_action_names())
diff --git a/src/antlion/controllers/buds_lib/test_actions/bt_utils.py b/src/antlion/controllers/buds_lib/test_actions/bt_utils.py
deleted file mode 100644
index a650e5f..0000000
--- a/src/antlion/controllers/buds_lib/test_actions/bt_utils.py
+++ /dev/null
@@ -1,294 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# TODO: In the future to decide whether to move it to a common directory rather
-# than the one specific to apollo.
-# TODO: The move is contingent on understanding the functions that should be
-# supported by the dut device (sec_device).
-
-"""A generic library with bluetooth related functions. The connection is assumed
-to be between and android phone with any dut (referred to as secondary device)
-device that supports the following calls:
-        sec_device.turn_on_bluetooth()
-        sec_device.is_bt_enabled():
-        sec_device.bluetooth_address
-        sec_device.set_pairing_mode()
-        sec_device.factory_reset()
-
-"""
-import queue
-import time
-from logging import Logger
-
-from antlion import asserts
-from antlion.controllers.buds_lib import tako_trace_logger
-from antlion.utils import TimeoutError
-from antlion.utils import wait_until
-
-# Add connection profile for future devices in this dictionary
-WEARABLE_BT_PROTOCOLS = {
-    'rio': {
-        'Comp. App': 'FALSE',
-        'HFP (pri.)': 'FALSE',
-        'HFP (sec.)': 'FALSE',
-        'A2DP (pri.)': 'FALSE',
-        'A2DP (sec.)': 'FALSE',
-    },
-    'apollo': {
-        'Comp': 'FALSE',
-        'HFP(pri.)': 'FALSE',
-        'HFP(sec.)': 'FALSE',
-        'A2DP(pri)': 'FALSE',
-        'A2DP(sec)': 'FALSE',
-    }
-}
-
-
-class BTUtilsError(Exception):
-    """Generic BTUtils error"""
-
-
-class BTUtils(object):
-    """A utility that provides access to bluetooth controls.
-
-    This class to be maintained as a generic class such that it is compatible
-    with any devices that pair with a phone.
-    """
-
-    def __init__(self):
-        self.default_timeout = 60
-        self.logger = tako_trace_logger.TakoTraceLogger(Logger(__file__))
-
-    def bt_pair_and_connect(self, pri_device, sec_device):
-        """Pair and connect a pri_device to a sec_device.
-
-        Args:
-        pri_device: an android device with sl4a installed.
-        sec_device: a wearable device.
-
-        Returns:
-        (Tuple)True if pair and connect successful. False Otherwise.
-        Time in ms to execute the flow.
-        """
-
-        pair_time = self.bt_pair(pri_device, sec_device)
-        connect_result, connect_time = self.bt_connect(pri_device, sec_device)
-        return connect_result, pair_time + connect_time
-
-    def bt_pair(self, pri_device, sec_device):
-        """Pair a pri_device to a sec_device.
-
-        Args:
-        pri_device: an android device with sl4a installed.
-        sec_device: a wearable device.
-
-        Returns:
-            (Tuple)True if pair successful. False Otherwise.
-            Time in ms to execute the flow.
-         """
-        start_time = time.time()
-        # Enable BT on the primary device if it's not currently ON.
-        if not pri_device.droid.bluetoothCheckState():
-            pri_device.droid.bluetoothToggleState(True)
-            try:
-                pri_device.ed.pop_event(event_name='BluetoothStateChangedOn',
-                                        timeout=10)
-            except queue.Empty:
-                raise BTUtilsError(
-                    'Failed to toggle Bluetooth on the primary device.')
-        sec_device.turn_on_bluetooth()
-        if not sec_device.is_bt_enabled():
-            raise BTUtilsError('Could not turn on Bluetooth on secondary '
-                               'devices')
-        target_addr = sec_device.bluetooth_address
-        sec_device.set_pairing_mode()
-
-        pri_device.droid.bluetoothDiscoverAndBond(target_addr)
-        # Loop until we have bonded successfully or timeout.
-        self.logger.info('Verifying devices are bonded')
-        try:
-            wait_until(lambda: self.android_device_in_paired_state(pri_device,
-                                                                   target_addr),
-                       self.default_timeout)
-        except TimeoutError as err:
-            raise BTUtilsError('bt_pair failed: {}'.format(err))
-        end_time = time.time()
-        return end_time - start_time
-
-    def bt_connect(self, pri_device, sec_device):
-        """Connect a previously paired sec_device to a pri_device.
-
-        Args:
-          pri_device: an android device with sl4a installed.
-          sec_device: a wearable device.
-
-        Returns:
-          (Tuple)True if connect successful. False otherwise.
-          Time in ms to execute the flow.
-        """
-        start_time = end_time = time.time()
-        target_addr = sec_device.bluetooth_address
-        # First check that devices are bonded.
-        paired = False
-        for paired_device in pri_device.droid.bluetoothGetBondedDevices():
-            if paired_device['address'] == target_addr:
-                paired = True
-                break
-        if not paired:
-            self.logger.error('Not paired to %s', sec_device.device_name)
-            return False, 0
-
-        self.logger.info('Attempting to connect.')
-        pri_device.droid.bluetoothConnectBonded(target_addr)
-
-        self.logger.info('Verifying devices are connected')
-        wait_until(
-            lambda: self.android_device_in_connected_state(pri_device,
-                                                           target_addr),
-            self.default_timeout)
-        end_time = time.time()
-        return True, end_time - start_time
-
-    def android_device_in_paired_state(self, device, mac_address):
-        """Check device in paired list."""
-        bonded_devices = device.droid.bluetoothGetBondedDevices()
-        for d in bonded_devices:
-            if d['address'] == mac_address:
-                self.logger.info('Successfully bonded to device')
-                return True
-        return False
-
-    def android_device_in_connected_state(self, device, mac_address):
-        """Check device in connected list."""
-        connected_devices = device.droid.bluetoothGetConnectedDevices()
-        for d in connected_devices:
-            if d['address'] == mac_address:
-                self.logger.info('Successfully connected to device')
-                return True
-        return False
-
-    def bt_unpair(self, pri_device, sec_device, factory_reset_dut=True):
-        """Unpairs two Android devices using bluetooth.
-
-        Args:
-          pri_device: an android device with sl4a installed.
-          sec_device: a wearable device.
-
-        Returns:
-          (Tuple)True: if the devices successfully unpaired.
-          Time in ms to execute the flow.
-        Raises:
-          Error: When devices fail to unpair.
-        """
-        target_address = sec_device.bluetooth_address
-        if not self.android_device_in_paired_state(pri_device, target_address):
-            self.logger.debug('Already unpaired.')
-            return True, 0
-        self.logger.debug('Unpairing from %s' % target_address)
-        start_time = end_time = time.time()
-        asserts.assert_true(
-            pri_device.droid.bluetoothUnbond(target_address),
-            'Failed to request device unpairing.')
-
-        # Check that devices have unpaired successfully.
-        self.logger.debug('Verifying devices are unpaired')
-
-        # Loop until we have unbonded successfully or timeout.
-        wait_until(
-            lambda: self.android_device_in_paired_state(pri_device,
-                                                        target_address),
-            self.default_timeout,
-            condition=False)
-
-        self.logger.info('Successfully unpaired from %s' % target_address)
-        if factory_reset_dut:
-            self.logger.info('Factory reset DUT')
-            sec_device.factory_reset()
-        end_time = time.time()
-        return True, end_time - start_time
-
-    def check_device_bt(self, device, **kwargs):
-        """Check the Bluetooth connection status from device.
-
-        Args:
-          device: a wearable device.
-          **kwargs: additional parameters
-
-        Returns:
-          True: if bt status check success, False otherwise.
-        """
-        if device.dut_type in ['rio', 'apollo']:
-            profiles = kwargs.get('profiles')
-            return self.check_dut_status(device, profiles)
-
-    def check_dut_status(self, device, profiles=None):
-        """Check the Bluetooth connection status from rio/apollo device.
-
-        Args:
-          device: rio/apollo device
-          profiles: A dict of profiles, eg. {'HFP (pri.)': 'TRUE', 'Comp. App':
-            'TRUE', 'A2DP (pri.)': 'TRUE'}
-
-        Returns:
-          True: if bt status check success, False otherwise.
-        """
-        expected = WEARABLE_BT_PROTOCOLS
-        self.logger.info(profiles)
-        for key in profiles:
-            expected[device.dut_type][key] = profiles[key]
-        try:
-            wait_until(lambda: self._compare_profile(device,
-                                                     expected[device.dut_type]),
-                       self.default_timeout)
-        except TimeoutError:
-            status = device.get_bt_status()
-            msg_fmt = self._get_formatted_output(expected[device.dut_type],
-                                                 status)
-            self.logger.error(msg_fmt)
-            return False
-        return True
-
-    def _get_formatted_output(self, expected, actual):
-        """On BT status mismatch generate formatted output string.
-
-        Args:
-          expected: Expected BT status hash.
-          actual: Actual BT status hash from Rio.
-
-        Returns:
-          Formatted mismatch string.
-
-        Raises:
-          Error: When unexpcted parameter encounterd.
-        """
-        msg = ''
-        mismatch_format = '{}: Expected {} Actual {}. '
-        if actual is None:
-            raise BTUtilsError('None is not expected.')
-        for key in expected.keys():
-            if expected[key] != actual[key]:
-                msg += mismatch_format.format(key, expected[key], actual[key])
-        return msg
-
-    def _compare_profile(self, device, expected):
-        """Compare input expected profile with actual."""
-        actual = device.get_bt_status()
-        if actual is None:
-            raise BTUtilsError('None is not expected.')
-        for key in expected.keys():
-            if expected[key] != actual[key]:
-                return False
-        return True
diff --git a/src/antlion/controllers/cellular_lib/AndroidCellularDut.py b/src/antlion/controllers/cellular_lib/AndroidCellularDut.py
deleted file mode 100644
index b9fa887..0000000
--- a/src/antlion/controllers/cellular_lib/AndroidCellularDut.py
+++ /dev/null
@@ -1,136 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.controllers.android_lib.tel import tel_utils
-from antlion.controllers.cellular_lib import BaseCellularDut
-import os
-
-GET_BUILD_VERSION = 'getprop ro.build.version.release'
-PIXELLOGGER_CONTROL = 'am broadcast -n com.android.pixellogger/.receiver.' \
-                      'AlwaysOnLoggingReceiver -a com.android.pixellogger.' \
-                      'service.logging.LoggingService.' \
-                      'ACTION_CONFIGURE_ALWAYS_ON_LOGGING ' \
-                      '-e intent_key_enable "{}"'
-
-NETWORK_TYPE_TO_BITMASK = {
-    BaseCellularDut.PreferredNetworkType.LTE_ONLY: '01000001000000000000',
-    BaseCellularDut.PreferredNetworkType.NR_LTE: '11000001000000000000',
-    BaseCellularDut.PreferredNetworkType.WCDMA_ONLY: '00000100001110000100',
-}
-
-class AndroidCellularDut(BaseCellularDut.BaseCellularDut):
-    """ Android implementation of the cellular DUT class."""
-    def __init__(self, ad, logger):
-        """ Keeps a handler to the android device.
-
-        Args:
-           ad: Android device handler
-           logger: a handler to the logger object
-        """
-        self.ad = ad
-        self.log = logger
-        logger.info('Initializing Android DUT with baseband version {}'.format(
-            ad.adb.getprop('gsm.version.baseband')))
-
-    def toggle_airplane_mode(self, new_state=True):
-        """ Turns airplane mode on / off.
-
-        Args:
-          new_state: True if airplane mode needs to be enabled.
-        """
-        tel_utils.toggle_airplane_mode(self.log, self.ad, new_state)
-
-    def toggle_data_roaming(self, new_state=True):
-        """ Enables or disables cellular data roaming.
-
-        Args:
-          new_state: True if data roaming needs to be enabled.
-        """
-        tel_utils.toggle_cell_data_roaming(self.ad, new_state)
-
-    def get_rx_tx_power_levels(self):
-        """ Obtains Rx and Tx power levels measured from the DUT.
-
-        Returns:
-            A tuple where the first element is an array with the RSRP value
-            in each Rx chain, and the second element is the Tx power in dBm.
-            Values for invalid or disabled Rx / Tx chains are set to None.
-        """
-        return tel_utils.get_rx_tx_power_levels(self.log, self.ad)
-
-    def set_apn(self, name, apn, type='default'):
-        """ Sets the Access Point Name.
-
-        Args:
-          name: the APN name
-          apn: the APN
-          type: the APN type
-        """
-        self.ad.droid.telephonySetAPN(name, apn, type)
-
-    def set_preferred_network_type(self, type):
-        """ Sets the preferred RAT.
-
-        Args:
-          type: an instance of class PreferredNetworkType
-        """
-
-        # If android version is S or later, uses bit mask to set and return.
-        version = self.ad.adb.shell(GET_BUILD_VERSION)
-        try:
-            version_in_number = int(version)
-            if version_in_number > 11:
-                set_network_cmd = 'cmd phone set-allowed-network-types-for-users '
-                set_network_cmd += NETWORK_TYPE_TO_BITMASK[type]
-                self.ad.adb.shell(set_network_cmd)
-                get_network_cmd = 'cmd phone get-allowed-network-types-for-users'
-                allowed_network = self.ad.adb.shell(get_network_cmd)
-                self.log.info('The allowed network: {}'.format(allowed_network))
-                return
-        except ValueError:
-            self.log.info('The android version is older than S, use sl4a')
-
-        if type == BaseCellularDut.PreferredNetworkType.LTE_ONLY:
-            formatted_type = tel_utils.NETWORK_MODE_LTE_ONLY
-        elif type == BaseCellularDut.PreferredNetworkType.WCDMA_ONLY:
-            formatted_type = tel_utils.NETWORK_MODE_WCDMA_ONLY
-        elif type == BaseCellularDut.PreferredNetworkType.GSM_ONLY:
-            formatted_type = tel_utils.NETWORK_MODE_GSM_ONLY
-        else:
-            raise ValueError('Invalid RAT type.')
-
-        if not self.ad.droid.telephonySetPreferredNetworkTypesForSubscription(
-                formatted_type, self.ad.droid.subscriptionGetDefaultSubId()):
-            self.log.error("Could not set preferred network type.")
-        else:
-            self.log.info("Preferred network type set.")
-
-    def get_telephony_signal_strength(self):
-        """ Wrapper for the method with the same name in tel_utils.
-
-        Will be deprecated and replaced by get_rx_tx_power_levels. """
-        tel_utils.get_telephony_signal_strength(self.ad)
-
-    def start_modem_logging(self):
-        """ Starts on-device log collection. """
-        self.ad.adb.shell('rm /data/vendor/slog/*.* -f')
-        self.ad.adb.shell(PIXELLOGGER_CONTROL.format('true'))
-
-    def stop_modem_logging(self):
-        """ Stops log collection and pulls logs. """
-        output_path = self.ad.device_log_path + '/modem/'
-        os.makedirs(output_path, exist_ok=True)
-        self.ad.adb.shell(PIXELLOGGER_CONTROL.format('false'))
diff --git a/src/antlion/controllers/cellular_lib/BaseCellConfig.py b/src/antlion/controllers/cellular_lib/BaseCellConfig.py
deleted file mode 100644
index 71939fd..0000000
--- a/src/antlion/controllers/cellular_lib/BaseCellConfig.py
+++ /dev/null
@@ -1,50 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-class BaseCellConfig:
-    """ Base cell configuration class.
-
-    Attributes:
-      output_power: a float indicating the required signal level at the
-          instrument's output.
-      input_power: a float indicating the required signal level at the
-          instrument's input.
-    """
-    # Configuration dictionary keys
-    PARAM_UL_PW = 'pul'
-    PARAM_DL_PW = 'pdl'
-
-    def __init__(self, log):
-        """ Initialize the base station config by setting all its
-            parameters to None.
-        Args:
-            log: logger object.
-        """
-        self.log = log
-        self.output_power = None
-        self.input_power = None
-        self.band = None
-
-    def incorporate(self, new_config):
-        """ Incorporates a different configuration by replacing the current
-            values with the new ones for all the parameters different to None.
-        Args:
-            new_config: 5G cell configuration object.
-        """
-        for attr, value in vars(new_config).items():
-            if value and not hasattr(self, attr):
-                setattr(self, attr, value)
diff --git a/src/antlion/controllers/cellular_lib/BaseCellularDut.py b/src/antlion/controllers/cellular_lib/BaseCellularDut.py
deleted file mode 100644
index 2e677a6..0000000
--- a/src/antlion/controllers/cellular_lib/BaseCellularDut.py
+++ /dev/null
@@ -1,87 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from enum import Enum
-
-
-class PreferredNetworkType(Enum):
-    """ Available preferred network types that can be passed to
-  set_preferred_network_type"""
-    LTE_ONLY = 'lte-only'
-    GSM_ONLY = 'gsm-only'
-    WCDMA_ONLY = 'wcdma-only'
-    NR_LTE = 'nr-lte'
-
-
-class BaseCellularDut():
-    """ Base class for DUTs used with cellular simulators. """
-    def toggle_airplane_mode(self, new_state=True):
-        """ Turns airplane mode on / off.
-
-        Args:
-          new_state: True if airplane mode needs to be enabled.
-        """
-        raise NotImplementedError()
-
-    def toggle_data_roaming(self, new_state=True):
-        """ Enables or disables cellular data roaming.
-
-        Args:
-          new_state: True if data roaming needs to be enabled.
-        """
-        raise NotImplementedError()
-
-    def get_rx_tx_power_levels(self):
-        """ Obtains Rx and Tx power levels measured from the DUT.
-
-        Returns:
-          A tuple where the first element is an array with the RSRP value
-          in each Rx chain, and the second element is the Tx power in dBm.
-          Values for invalid or disabled Rx / Tx chains are set to None.
-        """
-        raise NotImplementedError()
-
-    def set_apn(self, name, apn, type='default'):
-        """ Sets the Access Point Name.
-
-        Args:
-          name: the APN name
-          apn: the APN
-          type: the APN type
-        """
-        raise NotImplementedError()
-
-    def set_preferred_network_type(self, type):
-        """ Sets the preferred RAT.
-
-        Args:
-          type: an instance of class PreferredNetworkType
-        """
-        raise NotImplementedError()
-
-    def get_telephony_signal_strength(self):
-        """ Wrapper for the method with the same name in tel_utils.
-
-        Will be deprecated and replaced by get_rx_tx_power_levels. """
-        raise NotImplementedError()
-
-    def start_modem_logging(self):
-        """ Starts on-device log collection. """
-        raise NotImplementedError()
-
-    def stop_modem_logging(self):
-        """ Stops log collection and pulls logs. """
-        raise NotImplementedError()
diff --git a/src/antlion/controllers/cellular_lib/BaseSimulation.py b/src/antlion/controllers/cellular_lib/BaseSimulation.py
deleted file mode 100644
index 043f802..0000000
--- a/src/antlion/controllers/cellular_lib/BaseSimulation.py
+++ /dev/null
@@ -1,741 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import time
-from enum import Enum
-
-import numpy as np
-from antlion.controllers import cellular_simulator
-from antlion.controllers.cellular_lib.BaseCellConfig import BaseCellConfig
-
-
-class BaseSimulation(object):
-    """ Base class for cellular connectivity simulations.
-
-    Classes that inherit from this base class implement different simulation
-    setups. The base class contains methods that are common to all simulation
-    configurations.
-
-    """
-
-    NUM_UL_CAL_READS = 3
-    NUM_DL_CAL_READS = 5
-    MAX_BTS_INPUT_POWER = 30
-    MAX_PHONE_OUTPUT_POWER = 23
-    UL_MIN_POWER = -60.0
-
-    # Keys to obtain settings from the test_config dictionary.
-    KEY_CALIBRATION = "calibration"
-    KEY_ATTACH_RETRIES = "attach_retries"
-    KEY_ATTACH_TIMEOUT = "attach_timeout"
-
-    # Filepath to the config files stored in the Anritsu callbox. Needs to be
-    # formatted to replace {} with either A or B depending on the model.
-    CALLBOX_PATH_FORMAT_STR = 'C:\\Users\\MD8475{}\\Documents\\DAN_configs\\'
-
-    # Time in seconds to wait for the phone to settle
-    # after attaching to the base station.
-    SETTLING_TIME = 10
-
-    # Default time in seconds to wait for the phone to attach to the basestation
-    # after toggling airplane mode. This setting can be changed with the
-    # KEY_ATTACH_TIMEOUT keyword in the test configuration file.
-    DEFAULT_ATTACH_TIMEOUT = 120
-
-    # The default number of attach retries. This setting can be changed with
-    # the KEY_ATTACH_RETRIES keyword in the test configuration file.
-    DEFAULT_ATTACH_RETRIES = 3
-
-    # These two dictionaries allow to map from a string to a signal level and
-    # have to be overridden by the simulations inheriting from this class.
-    UPLINK_SIGNAL_LEVEL_DICTIONARY = {}
-    DOWNLINK_SIGNAL_LEVEL_DICTIONARY = {}
-
-    # Units for downlink signal level. This variable has to be overridden by
-    # the simulations inheriting from this class.
-    DOWNLINK_SIGNAL_LEVEL_UNITS = None
-
-    def __init__(
-        self, simulator, log, dut, test_config, calibration_table,
-        nr_mode=None):
-        """ Initializes the Simulation object.
-
-        Keeps a reference to the callbox, log and dut handlers and
-        initializes the class attributes.
-
-        Args:
-            simulator: a cellular simulator controller
-            log: a logger handle
-            dut: a device handler implementing BaseCellularDut
-            test_config: test configuration obtained from the config file
-            calibration_table: a dictionary containing path losses for
-                different bands.
-        """
-
-        self.simulator = simulator
-        self.log = log
-        self.dut = dut
-        self.calibration_table = calibration_table
-        self.nr_mode = nr_mode
-
-        # Turn calibration on or off depending on the test config value. If the
-        # key is not present, set to False by default
-        if self.KEY_CALIBRATION not in test_config:
-            self.log.warning('The {} key is not set in the testbed '
-                             'parameters. Setting to off by default. To '
-                             'turn calibration on, include the key with '
-                             'a true/false value.'.format(
-                                 self.KEY_CALIBRATION))
-
-        self.calibration_required = test_config.get(self.KEY_CALIBRATION,
-                                                    False)
-
-        # Obtain the allowed number of retries from the test configs
-        if self.KEY_ATTACH_RETRIES not in test_config:
-            self.log.warning('The {} key is not set in the testbed '
-                             'parameters. Setting to {} by default.'.format(
-                                 self.KEY_ATTACH_RETRIES,
-                                 self.DEFAULT_ATTACH_RETRIES))
-
-        self.attach_retries = test_config.get(self.KEY_ATTACH_RETRIES,
-                                              self.DEFAULT_ATTACH_RETRIES)
-
-        # Obtain the attach timeout from the test configs
-        if self.KEY_ATTACH_TIMEOUT not in test_config:
-            self.log.warning('The {} key is not set in the testbed '
-                             'parameters. Setting to {} by default.'.format(
-                                 self.KEY_ATTACH_TIMEOUT,
-                                 self.DEFAULT_ATTACH_TIMEOUT))
-
-        self.attach_timeout = test_config.get(self.KEY_ATTACH_TIMEOUT,
-                                              self.DEFAULT_ATTACH_TIMEOUT)
-
-        # Create an empty list for cell configs.
-        self.cell_configs = []
-
-        # Store the current calibrated band
-        self.current_calibrated_band = None
-
-        # Path loss measured during calibration
-        self.dl_path_loss = None
-        self.ul_path_loss = None
-
-        # Target signal levels obtained during configuration
-        self.sim_dl_power = None
-        self.sim_ul_power = None
-
-        # Stores RRC status change timer
-        self.rrc_sc_timer = None
-
-        # Set to default APN
-        log.info("Configuring APN.")
-        self.dut.set_apn('test', 'test')
-
-        # Enable roaming on the phone
-        self.dut.toggle_data_roaming(True)
-
-        # Make sure airplane mode is on so the phone won't attach right away
-        self.dut.toggle_airplane_mode(True)
-
-        # Wait for airplane mode setting to propagate
-        time.sleep(2)
-
-        # Prepare the simulator for this simulation setup
-        self.setup_simulator()
-
-    def setup_simulator(self):
-        """ Do initial configuration in the simulator. """
-        raise NotImplementedError()
-
-    def attach(self):
-        """ Attach the phone to the basestation.
-
-        Sets a good signal level, toggles airplane mode
-        and waits for the phone to attach.
-
-        Returns:
-            True if the phone was able to attach, False if not.
-        """
-
-        # Turn on airplane mode
-        self.dut.toggle_airplane_mode(True)
-
-        # Wait for airplane mode setting to propagate
-        time.sleep(2)
-
-        # Provide a good signal power for the phone to attach easily
-        new_config = BaseCellConfig(self.log)
-        new_config.input_power = -10
-        new_config.output_power = -30
-        self.simulator.configure_bts(new_config)
-        self.cell_configs[0].incorporate(new_config)
-
-        # Try to attach the phone.
-        for i in range(self.attach_retries):
-
-            try:
-
-                # Turn off airplane mode
-                self.dut.toggle_airplane_mode(False)
-
-                # Wait for the phone to attach.
-                self.simulator.wait_until_attached(timeout=self.attach_timeout)
-
-            except cellular_simulator.CellularSimulatorError:
-
-                # The phone failed to attach
-                self.log.info(
-                    "UE failed to attach on attempt number {}.".format(i + 1))
-
-                # Turn airplane mode on to prepare the phone for a retry.
-                self.dut.toggle_airplane_mode(True)
-
-                # Wait for APM to propagate
-                time.sleep(3)
-
-                # Retry
-                if i < self.attach_retries - 1:
-                    # Retry
-                    continue
-                else:
-                    # No more retries left. Return False.
-                    return False
-
-            else:
-                # The phone attached successfully.
-                time.sleep(self.SETTLING_TIME)
-                self.log.info("UE attached to the callbox.")
-                break
-
-        return True
-
-    def detach(self):
-        """ Detach the phone from the basestation.
-
-        Turns airplane mode and resets basestation.
-        """
-
-        # Set the DUT to airplane mode so it doesn't see the
-        # cellular network going off
-        self.dut.toggle_airplane_mode(True)
-
-        # Wait for APM to propagate
-        time.sleep(2)
-
-        # Power off basestation
-        self.simulator.detach()
-
-    def stop(self):
-        """  Detach phone from the basestation by stopping the simulation.
-
-        Stop the simulation and turn airplane mode on. """
-
-        # Set the DUT to airplane mode so it doesn't see the
-        # cellular network going off
-        self.dut.toggle_airplane_mode(True)
-
-        # Wait for APM to propagate
-        time.sleep(2)
-
-        # Stop the simulation
-        self.simulator.stop()
-
-    def start(self):
-        """ Start the simulation by attaching the phone and setting the
-        required DL and UL power.
-
-        Note that this refers to starting the simulated testing environment
-        and not to starting the signaling on the cellular instruments,
-        which might have been done earlier depending on the cellular
-        instrument controller implementation. """
-
-        if not self.attach():
-            raise RuntimeError('Could not attach to base station.')
-
-        # Starts IP traffic while changing this setting to force the UE to be
-        # in Communication state, as UL power cannot be set in Idle state
-        self.start_traffic_for_calibration()
-
-        # Wait until it goes to communication state
-        self.simulator.wait_until_communication_state()
-
-        # Set uplink power to a low value before going to the actual desired
-        # value. This avoid inconsistencies produced by the hysteresis in the
-        # PA switching points.
-        self.log.info('Setting UL power to -5 dBm before going to the '
-                      'requested value to avoid incosistencies caused by '
-                      'hysteresis.')
-        self.set_uplink_tx_power(-5)
-
-        # Set signal levels obtained from the test parameters
-        self.set_downlink_rx_power(self.sim_dl_power)
-        self.set_uplink_tx_power(self.sim_ul_power)
-
-        # Verify signal level
-        try:
-            rx_power, tx_power = self.dut.get_rx_tx_power_levels()
-
-            if not tx_power or not rx_power[0]:
-                raise RuntimeError('The method return invalid Tx/Rx values.')
-
-            self.log.info('Signal level reported by the DUT in dBm: Tx = {}, '
-                          'Rx = {}.'.format(tx_power, rx_power))
-
-            if abs(self.sim_ul_power - tx_power) > 1:
-                self.log.warning('Tx power at the UE is off by more than 1 dB')
-
-        except RuntimeError as e:
-            self.log.error('Could not verify Rx / Tx levels: %s.' % e)
-
-        # Stop IP traffic after setting the UL power level
-        self.stop_traffic_for_calibration()
-
-    def configure(self, parameters):
-        """ Configures simulation using a dictionary of parameters.
-
-        Children classes need to call this method first.
-
-        Args:
-            parameters: a configuration dictionary
-        """
-        # Setup uplink power
-        ul_power = self.get_uplink_power_from_parameters(parameters)
-
-        # Power is not set on the callbox until after the simulation is
-        # started. Saving this value in a variable for later
-        self.sim_ul_power = ul_power
-
-        # Setup downlink power
-
-        dl_power = self.get_downlink_power_from_parameters(parameters)
-
-        # Power is not set on the callbox until after the simulation is
-        # started. Saving this value in a variable for later
-        self.sim_dl_power = dl_power
-
-    def set_uplink_tx_power(self, signal_level):
-        """ Configure the uplink tx power level
-
-        Args:
-            signal_level: calibrated tx power in dBm
-        """
-        new_config = BaseCellConfig(self.log)
-        new_config.input_power = self.calibrated_uplink_tx_power(
-            self.cell_configs[0], signal_level)
-        self.simulator.configure_bts(new_config)
-        self.cell_configs[0].incorporate(new_config)
-
-    def set_downlink_rx_power(self, signal_level):
-        """ Configure the downlink rx power level
-
-        Args:
-            signal_level: calibrated rx power in dBm
-        """
-        new_config = BaseCellConfig(self.log)
-        new_config.output_power = self.calibrated_downlink_rx_power(
-            self.cell_configs[0], signal_level)
-        self.simulator.configure_bts(new_config)
-        self.cell_configs[0].incorporate(new_config)
-
-    def get_uplink_power_from_parameters(self, parameters):
-        """ Reads uplink power from the parameter dictionary. """
-
-        if BaseCellConfig.PARAM_UL_PW in parameters:
-            value = parameters[BaseCellConfig.PARAM_UL_PW]
-            if value in self.UPLINK_SIGNAL_LEVEL_DICTIONARY:
-                return self.UPLINK_SIGNAL_LEVEL_DICTIONARY[value]
-            else:
-                try:
-                    if isinstance(value[0], str) and value[0] == 'n':
-                        # Treat the 'n' character as a negative sign
-                        return -int(value[1:])
-                    else:
-                        return int(value)
-                except ValueError:
-                    pass
-
-        # If the method got to this point it is because PARAM_UL_PW was not
-        # included in the test parameters or the provided value was invalid.
-        raise ValueError(
-            "The config dictionary must include a key {} with the desired "
-            "uplink power expressed by an integer number in dBm or with one of "
-            "the following values: {}. To indicate negative "
-            "values, use the letter n instead of - sign.".format(
-                BaseCellConfig.PARAM_UL_PW,
-                list(self.UPLINK_SIGNAL_LEVEL_DICTIONARY.keys())))
-
-    def get_downlink_power_from_parameters(self, parameters):
-        """ Reads downlink power from a the parameter dictionary. """
-
-        if BaseCellConfig.PARAM_DL_PW in parameters:
-            value = parameters[BaseCellConfig.PARAM_DL_PW]
-            if value not in self.DOWNLINK_SIGNAL_LEVEL_DICTIONARY:
-                raise ValueError(
-                    "Invalid signal level value {}.".format(value))
-            else:
-                return self.DOWNLINK_SIGNAL_LEVEL_DICTIONARY[value]
-        else:
-            # Use default value
-            power = self.DOWNLINK_SIGNAL_LEVEL_DICTIONARY['excellent']
-            self.log.info("No DL signal level value was indicated in the test "
-                          "parameters. Using default value of {} {}.".format(
-                              power, self.DOWNLINK_SIGNAL_LEVEL_UNITS))
-            return power
-
-    def calibrated_downlink_rx_power(self, bts_config, signal_level):
-        """ Calculates the power level at the instrument's output in order to
-        obtain the required rx power level at the DUT's input.
-
-        If calibration values are not available, returns the uncalibrated signal
-        level.
-
-        Args:
-            bts_config: the current configuration at the base station. derived
-                classes implementations can use this object to indicate power as
-                spectral power density or in other units.
-            signal_level: desired downlink received power, can be either a
-                key value pair, an int or a float
-        """
-
-        # Obtain power value if the provided signal_level is a key value pair
-        if isinstance(signal_level, Enum):
-            power = signal_level.value
-        else:
-            power = signal_level
-
-        # Try to use measured path loss value. If this was not set, it will
-        # throw an TypeError exception
-        try:
-            calibrated_power = round(power + self.dl_path_loss)
-            if calibrated_power > self.simulator.MAX_DL_POWER:
-                self.log.warning(
-                    "Cannot achieve phone DL Rx power of {} dBm. Requested TX "
-                    "power of {} dBm exceeds callbox limit!".format(
-                        power, calibrated_power))
-                calibrated_power = self.simulator.MAX_DL_POWER
-                self.log.warning(
-                    "Setting callbox Tx power to max possible ({} dBm)".format(
-                        calibrated_power))
-
-            self.log.info(
-                "Requested phone DL Rx power of {} dBm, setting callbox Tx "
-                "power at {} dBm".format(power, calibrated_power))
-            time.sleep(2)
-            # Power has to be a natural number so calibration wont be exact.
-            # Inform the actual received power after rounding.
-            self.log.info(
-                "Phone downlink received power is {0:.2f} dBm".format(
-                    calibrated_power - self.dl_path_loss))
-            return calibrated_power
-        except TypeError:
-            self.log.info("Phone downlink received power set to {} (link is "
-                          "uncalibrated).".format(round(power)))
-            return round(power)
-
-    def calibrated_uplink_tx_power(self, bts_config, signal_level):
-        """ Calculates the power level at the instrument's input in order to
-        obtain the required tx power level at the DUT's output.
-
-        If calibration values are not available, returns the uncalibrated signal
-        level.
-
-        Args:
-            bts_config: the current configuration at the base station. derived
-                classes implementations can use this object to indicate power as
-                spectral power density or in other units.
-            signal_level: desired uplink transmitted power, can be either a
-                key value pair, an int or a float
-        """
-
-        # Obtain power value if the provided signal_level is a key value pair
-        if isinstance(signal_level, Enum):
-            power = signal_level.value
-        else:
-            power = signal_level
-
-        # Try to use measured path loss value. If this was not set, it will
-        # throw an TypeError exception
-        try:
-            calibrated_power = round(power - self.ul_path_loss)
-            if calibrated_power < self.UL_MIN_POWER:
-                self.log.warning(
-                    "Cannot achieve phone UL Tx power of {} dBm. Requested UL "
-                    "power of {} dBm exceeds callbox limit!".format(
-                        power, calibrated_power))
-                calibrated_power = self.UL_MIN_POWER
-                self.log.warning(
-                    "Setting UL Tx power to min possible ({} dBm)".format(
-                        calibrated_power))
-
-            self.log.info(
-                "Requested phone UL Tx power of {} dBm, setting callbox Rx "
-                "power at {} dBm".format(power, calibrated_power))
-            time.sleep(2)
-            # Power has to be a natural number so calibration wont be exact.
-            # Inform the actual transmitted power after rounding.
-            self.log.info(
-                "Phone uplink transmitted power is {0:.2f} dBm".format(
-                    calibrated_power + self.ul_path_loss))
-            return calibrated_power
-        except TypeError:
-            self.log.info("Phone uplink transmitted power set to {} (link is "
-                          "uncalibrated).".format(round(power)))
-            return round(power)
-
-    def calibrate(self, band):
-        """ Calculates UL and DL path loss if it wasn't done before.
-
-        The should be already set to the required band before calling this
-        method.
-
-        Args:
-            band: the band that is currently being calibrated.
-        """
-
-        if self.dl_path_loss and self.ul_path_loss:
-            self.log.info("Measurements are already calibrated.")
-
-        # Attach the phone to the base station
-        if not self.attach():
-            self.log.info(
-                "Skipping calibration because the phone failed to attach.")
-            return
-
-        # If downlink or uplink were not yet calibrated, do it now
-        if not self.dl_path_loss:
-            self.dl_path_loss = self.downlink_calibration()
-        if not self.ul_path_loss:
-            self.ul_path_loss = self.uplink_calibration()
-
-        # Detach after calibrating
-        self.detach()
-        time.sleep(2)
-
-    def start_traffic_for_calibration(self):
-        """
-            Starts UDP IP traffic before running calibration. Uses APN_1
-            configured in the phone.
-        """
-        self.simulator.start_data_traffic()
-
-    def stop_traffic_for_calibration(self):
-        """
-            Stops IP traffic after calibration.
-        """
-        self.simulator.stop_data_traffic()
-
-    def downlink_calibration(self, rat=None, power_units_conversion_func=None):
-        """ Computes downlink path loss and returns the calibration value
-
-        The DUT needs to be attached to the base station before calling this
-        method.
-
-        Args:
-            rat: desired RAT to calibrate (matching the label reported by
-                the phone)
-            power_units_conversion_func: a function to convert the units
-                reported by the phone to dBm. needs to take two arguments: the
-                reported signal level and bts. use None if no conversion is
-                needed.
-        Returns:
-            Downlink calibration value and measured DL power.
-        """
-
-        # Check if this parameter was set. Child classes may need to override
-        # this class passing the necessary parameters.
-        if not rat:
-            raise ValueError(
-                "The parameter 'rat' has to indicate the RAT being used as "
-                "reported by the phone.")
-
-        # Save initial output level to restore it after calibration
-        restoration_config = BaseCellConfig(self.log)
-        restoration_config.output_power = self.cell_configs[0].output_power
-
-        # Set BTS to a good output level to minimize measurement error
-        new_config = BaseCellConfig(self.log)
-        new_config.output_power = self.simulator.MAX_DL_POWER - 5
-        self.simulator.configure_bts(new_config)
-
-        # Starting IP traffic
-        self.start_traffic_for_calibration()
-
-        down_power_measured = []
-        for i in range(0, self.NUM_DL_CAL_READS):
-            # For some reason, the RSRP gets updated on Screen ON event
-            signal_strength = self.dut.get_telephony_signal_strength()
-            down_power_measured.append(signal_strength[rat])
-            time.sleep(5)
-
-        # Stop IP traffic
-        self.stop_traffic_for_calibration()
-
-        # Reset bts to original settings
-        self.simulator.configure_bts(restoration_config)
-        time.sleep(2)
-
-        # Calculate the mean of the measurements
-        reported_asu_power = np.nanmean(down_power_measured)
-
-        # Convert from RSRP to signal power
-        if power_units_conversion_func:
-            avg_down_power = power_units_conversion_func(
-                reported_asu_power, self.cell_configs[0])
-        else:
-            avg_down_power = reported_asu_power
-
-        # Calculate Path Loss
-        dl_target_power = self.simulator.MAX_DL_POWER - 5
-        down_call_path_loss = dl_target_power - avg_down_power
-
-        # Validate the result
-        if not 0 < down_call_path_loss < 100:
-            raise RuntimeError(
-                "Downlink calibration failed. The calculated path loss value "
-                "was {} dBm.".format(down_call_path_loss))
-
-        self.log.info(
-            "Measured downlink path loss: {} dB".format(down_call_path_loss))
-
-        return down_call_path_loss
-
-    def uplink_calibration(self):
-        """ Computes uplink path loss and returns the calibration value
-
-        The DUT needs to be attached to the base station before calling this
-        method.
-
-        Returns:
-            Uplink calibration value and measured UL power
-        """
-
-        # Save initial input level to restore it after calibration
-        restoration_config = BaseCellConfig(self.log)
-        restoration_config.input_power = self.cell_configs[0].input_power
-
-        # Set BTS1 to maximum input allowed in order to perform
-        # uplink calibration
-        target_power = self.MAX_PHONE_OUTPUT_POWER
-        new_config = BaseCellConfig(self.log)
-        new_config.input_power = self.MAX_BTS_INPUT_POWER
-        self.simulator.configure_bts(new_config)
-
-        # Start IP traffic
-        self.start_traffic_for_calibration()
-
-        up_power_per_chain = []
-        # Get the number of chains
-        cmd = 'MONITOR? UL_PUSCH'
-        uplink_meas_power = self.anritsu.send_query(cmd)
-        str_power_chain = uplink_meas_power.split(',')
-        num_chains = len(str_power_chain)
-        for ichain in range(0, num_chains):
-            up_power_per_chain.append([])
-
-        for i in range(0, self.NUM_UL_CAL_READS):
-            uplink_meas_power = self.anritsu.send_query(cmd)
-            str_power_chain = uplink_meas_power.split(',')
-
-            for ichain in range(0, num_chains):
-                if (str_power_chain[ichain] == 'DEACTIVE'):
-                    up_power_per_chain[ichain].append(float('nan'))
-                else:
-                    up_power_per_chain[ichain].append(
-                        float(str_power_chain[ichain]))
-
-            time.sleep(3)
-
-        # Stop IP traffic
-        self.stop_traffic_for_calibration()
-
-        # Reset bts to original settings
-        self.simulator.configure_bts(restoration_config)
-        time.sleep(2)
-
-        # Phone only supports 1x1 Uplink so always chain 0
-        avg_up_power = np.nanmean(up_power_per_chain[0])
-        if np.isnan(avg_up_power):
-            raise RuntimeError(
-                "Calibration failed because the callbox reported the chain to "
-                "be deactive.")
-
-        up_call_path_loss = target_power - avg_up_power
-
-        # Validate the result
-        if not 0 < up_call_path_loss < 100:
-            raise RuntimeError(
-                "Uplink calibration failed. The calculated path loss value "
-                "was {} dBm.".format(up_call_path_loss))
-
-        self.log.info(
-            "Measured uplink path loss: {} dB".format(up_call_path_loss))
-
-        return up_call_path_loss
-
-    def load_pathloss_if_required(self):
-        """ If calibration is required, try to obtain the pathloss values from
-        the calibration table and measure them if they are not available. """
-        # Invalidate the previous values
-        self.dl_path_loss = None
-        self.ul_path_loss = None
-
-        # Load the new ones
-        if self.calibration_required:
-
-            band = self.cell_configs[0].band
-
-            # Try loading the path loss values from the calibration table. If
-            # they are not available, use the automated calibration procedure.
-            try:
-                self.dl_path_loss = self.calibration_table[band]["dl"]
-                self.ul_path_loss = self.calibration_table[band]["ul"]
-            except KeyError:
-                self.calibrate(band)
-
-            # Complete the calibration table with the new values to be used in
-            # the next tests.
-            if band not in self.calibration_table:
-                self.calibration_table[band] = {}
-
-            if "dl" not in self.calibration_table[band] and self.dl_path_loss:
-                self.calibration_table[band]["dl"] = self.dl_path_loss
-
-            if "ul" not in self.calibration_table[band] and self.ul_path_loss:
-                self.calibration_table[band]["ul"] = self.ul_path_loss
-
-    def maximum_downlink_throughput(self):
-        """ Calculates maximum achievable downlink throughput in the current
-        simulation state.
-
-        Because thoughput is dependent on the RAT, this method needs to be
-        implemented by children classes.
-
-        Returns:
-            Maximum throughput in mbps
-        """
-        raise NotImplementedError()
-
-    def maximum_uplink_throughput(self):
-        """ Calculates maximum achievable downlink throughput in the current
-        simulation state.
-
-        Because thoughput is dependent on the RAT, this method needs to be
-        implemented by children classes.
-
-        Returns:
-            Maximum throughput in mbps
-        """
-        raise NotImplementedError()
diff --git a/src/antlion/controllers/cellular_lib/GsmSimulation.py b/src/antlion/controllers/cellular_lib/GsmSimulation.py
deleted file mode 100644
index f0ebf03..0000000
--- a/src/antlion/controllers/cellular_lib/GsmSimulation.py
+++ /dev/null
@@ -1,155 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import ntpath
-
-import time
-from antlion.controllers.anritsu_lib.band_constants import GSM_BAND_DCS1800
-from antlion.controllers.anritsu_lib.band_constants import GSM_BAND_EGSM900
-from antlion.controllers.anritsu_lib.band_constants import GSM_BAND_GSM850
-from antlion.controllers.anritsu_lib.band_constants import GSM_BAND_RGSM900
-from antlion.controllers.anritsu_lib.md8475a import BtsGprsMode
-from antlion.controllers.anritsu_lib.md8475a import BtsNumber
-from antlion.controllers.anritsu_lib import md8475_cellular_simulator as anritsusim
-from antlion.controllers.cellular_lib import BaseCellularDut
-from antlion.controllers.cellular_lib.BaseSimulation import BaseSimulation
-from antlion.controllers.cellular_lib.BaseCellConfig import BaseCellConfig
-
-
-class GsmSimulation(BaseSimulation):
-    """ Single base station GSM. """
-
-    # Simulation config files in the callbox computer.
-    # These should be replaced in the future by setting up
-    # the same configuration manually.
-
-    GSM_BASIC_SIM_FILE = 'SIM_default_GSM.wnssp'
-
-    GSM_CELL_FILE = 'CELL_GSM_config.wnscp'
-
-    # Configuration dictionary keys
-    PARAM_BAND = "band"
-    PARAM_GPRS = "gprs"
-    PARAM_EGPRS = "edge"
-    PARAM_NO_GPRS = "nogprs"
-    PARAM_SLOTS = "slots"
-
-    bands_parameter_mapping = {
-        '850': GSM_BAND_GSM850,
-        '900': GSM_BAND_EGSM900,
-        '1800': GSM_BAND_DCS1800,
-        '1900': GSM_BAND_RGSM900
-    }
-
-    def __init__(self, simulator, log, dut, test_config, calibration_table):
-        """ Initializes the simulator for a single-carrier GSM simulation.
-
-        Loads a simple LTE simulation environment with 1 basestation. It also
-        creates the BTS handle so we can change the parameters as desired.
-
-        Args:
-            simulator: a cellular simulator controller
-            log: a logger handle
-            dut: a device handler implementing BaseCellularDut
-            test_config: test configuration obtained from the config file
-            calibration_table: a dictionary containing path losses for
-                different bands.
-
-        """
-        # The GSM simulation relies on the cellular simulator to be a MD8475
-        if not isinstance(self.simulator, anritsusim.MD8475CellularSimulator):
-            raise ValueError('The GSM simulation relies on the simulator to '
-                             'be an Anritsu MD8475 A/B instrument.')
-
-        # The Anritsu controller needs to be unwrapped before calling
-        # super().__init__ because setup_simulator() requires self.anritsu and
-        # will be called during the parent class initialization.
-        self.anritsu = self.simulator.anritsu
-        self.bts1 = self.anritsu.get_BTS(BtsNumber.BTS1)
-
-        super().__init__(simulator, log, dut, test_config, calibration_table)
-
-        self.dut.set_preferred_network_type(
-            BaseCellularDut.PreferredNetworkType.GSM_ONLY)
-
-    def setup_simulator(self):
-        """ Do initial configuration in the simulator. """
-
-        # Load callbox config files
-        callbox_config_path = self.CALLBOX_PATH_FORMAT_STR.format(
-            self.anritsu._md8475_version)
-
-        self.anritsu.load_simulation_paramfile(
-            ntpath.join(callbox_config_path, self.GSM_BASIC_SIM_FILE))
-        self.anritsu.load_cell_paramfile(
-            ntpath.join(callbox_config_path, self.GSM_CELL_FILE))
-
-        # Start simulation if it wasn't started
-        self.anritsu.start_simulation()
-
-    def configure(self, parameters):
-        """ Configures simulation using a dictionary of parameters.
-
-        Processes GSM configuration parameters.
-
-        Args:
-            parameters: a configuration dictionary
-        """
-        # Don't call super() because Gsm doesn't control Tx power.
-
-        # Setup band
-        if self.PARAM_BAND not in parameters:
-            raise ValueError(
-                "The configuration dictionary must include key '{}' with the "
-                "required band number.".format(self.PARAM_BAND))
-
-        self.set_band(self.bts1, parameters[self.PARAM_BAND])
-        self.load_pathloss_if_required()
-
-        # Setup GPRS mode
-
-        if self.PARAM_GPRS in parameters:
-            self.bts1.gsm_gprs_mode = BtsGprsMode.GPRS
-        elif self.PARAM_EGPRS in parameters:
-            self.bts1.gsm_gprs_mode = BtsGprsMode.EGPRS
-        elif self.PARAM_NO_GPRS in parameters:
-            self.bts1.gsm_gprs_mode = BtsGprsMode.NO_GPRS
-        else:
-            raise ValueError(
-                "GPRS mode needs to be indicated in the config dictionary by "
-                "including either {}, {} or {} as a key.".format(
-                    self.PARAM_GPRS, self.PARAM_EGPRS, self.PARAM_NO_GPRS))
-
-        # Setup slot allocation
-        if self.PARAM_SLOTS not in parameters or len(
-                parameters[self.PARAM_SLOTS]) != 2:
-            raise ValueError(
-                "The config dictionary must include key {} with a list of two "
-                "int values indicating DL and UL slots.".format(
-                    self.PARAM_SLOTS))
-        values = parameters[self.PARAM_SLOTS]
-        self.bts1.gsm_slots = (int(values[0]), int(values[1]))
-
-    def set_band(self, bts, band):
-        """ Sets the band used for communication.
-
-        Args:
-            bts: basestation handle
-            band: desired band
-        """
-
-        bts.band = band
-        time.sleep(5)  # It takes some time to propagate the new band
diff --git a/src/antlion/controllers/cellular_lib/LteCellConfig.py b/src/antlion/controllers/cellular_lib/LteCellConfig.py
deleted file mode 100644
index 8666f75..0000000
--- a/src/antlion/controllers/cellular_lib/LteCellConfig.py
+++ /dev/null
@@ -1,488 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import antlion.controllers.cellular_lib.BaseCellConfig as base_cell
-import antlion.controllers.cellular_lib.LteSimulation as lte_sim
-import math
-
-
-class LteCellConfig(base_cell.BaseCellConfig):
-    """ Extension of the BaseBtsConfig to implement parameters that are
-         exclusive to LTE.
-
-    Attributes:
-        band: an integer indicating the required band number.
-        dlul_config: an integer indicating the TDD config number.
-        ssf_config: an integer indicating the Special Sub-Frame config.
-        bandwidth: a float indicating the required channel bandwidth.
-        mimo_mode: an instance of LteSimulation.MimoMode indicating the
-            required MIMO mode for the downlink signal.
-        transmission_mode: an instance of LteSimulation.TransmissionMode
-            indicating the required TM.
-        scheduling_mode: an instance of LteSimulation.SchedulingMode
-            indicating whether to use Static or Dynamic scheduling.
-        dl_rbs: an integer indicating the number of downlink RBs
-        ul_rbs: an integer indicating the number of uplink RBs
-        dl_mcs: an integer indicating the MCS for the downlink signal
-        ul_mcs: an integer indicating the MCS for the uplink signal
-        dl_256_qam_enabled: a boolean indicating if 256 QAM is enabled
-        ul_64_qam_enabled: a boolean indicating if 256 QAM is enabled
-        mac_padding: a boolean indicating whether RBs should be allocated
-            when there is no user data in static scheduling
-        dl_channel: an integer indicating the downlink channel number
-        cfi: an integer indicating the Control Format Indicator
-        paging_cycle: an integer indicating the paging cycle duration in
-            milliseconds
-        phich: a string indicating the PHICH group size parameter
-        drx_connected_mode: a boolean indicating whether cDRX mode is
-            on or off
-        drx_on_duration_timer: number of PDCCH subframes representing
-            DRX on duration
-        drx_inactivity_timer: number of PDCCH subframes to wait before
-            entering DRX mode
-        drx_retransmission_timer: number of consecutive PDCCH subframes
-            to wait for retransmission
-        drx_long_cycle: number of subframes representing one long DRX cycle.
-            One cycle consists of DRX sleep + DRX on duration
-        drx_long_cycle_offset: number representing offset in range
-            0 to drx_long_cycle - 1
-    """
-    PARAM_FRAME_CONFIG = "tddconfig"
-    PARAM_BW = "bw"
-    PARAM_SCHEDULING = "scheduling"
-    PARAM_SCHEDULING_STATIC = "static"
-    PARAM_SCHEDULING_DYNAMIC = "dynamic"
-    PARAM_PATTERN = "pattern"
-    PARAM_TM = "tm"
-    PARAM_BAND = "band"
-    PARAM_MIMO = "mimo"
-    PARAM_DL_MCS = 'dlmcs'
-    PARAM_UL_MCS = 'ulmcs'
-    PARAM_SSF = 'ssf'
-    PARAM_CFI = 'cfi'
-    PARAM_PAGING = 'paging'
-    PARAM_PHICH = 'phich'
-    PARAM_DRX = 'drx'
-    PARAM_PADDING = 'mac_padding'
-    PARAM_DL_256_QAM_ENABLED = "256_qam_dl_enabled"
-    PARAM_UL_64_QAM_ENABLED = "64_qam_ul_enabled"
-    PARAM_DL_EARFCN = 'dl_earfcn'
-
-    def __init__(self, log):
-        """ Initialize the base station config by setting all its
-        parameters to None.
-        Args:
-            log: logger object.
-        """
-        super().__init__(log)
-        self.band = None
-        self.dlul_config = None
-        self.ssf_config = None
-        self.bandwidth = None
-        self.mimo_mode = None
-        self.transmission_mode = None
-        self.scheduling_mode = None
-        self.dl_rbs = None
-        self.ul_rbs = None
-        self.dl_mcs = None
-        self.ul_mcs = None
-        self.dl_256_qam_enabled = None
-        self.ul_64_qam_enabled = None
-        self.mac_padding = None
-        self.dl_channel = None
-        self.cfi = None
-        self.paging_cycle = None
-        self.phich = None
-        self.drx_connected_mode = None
-        self.drx_on_duration_timer = None
-        self.drx_inactivity_timer = None
-        self.drx_retransmission_timer = None
-        self.drx_long_cycle = None
-        self.drx_long_cycle_offset = None
-
-    def __str__(self):
-        return str(vars(self))
-
-    def configure(self, parameters):
-        """ Configures an LTE cell using a dictionary of parameters.
-
-        Args:
-            parameters: a configuration dictionary
-        """
-        # Setup band
-        if self.PARAM_BAND not in parameters:
-            raise ValueError(
-                "The configuration dictionary must include a key '{}' with "
-                "the required band number.".format(self.PARAM_BAND))
-
-        self.band = parameters[self.PARAM_BAND]
-
-        if self.PARAM_DL_EARFCN not in parameters:
-            band = int(self.band)
-            channel = int(lte_sim.LteSimulation.LOWEST_DL_CN_DICTIONARY[band] +
-                          lte_sim.LteSimulation.LOWEST_DL_CN_DICTIONARY[band +
-                                                                        1]) / 2
-            self.log.warning(
-                "Key '{}' was not set. Using center band channel {} by default."
-                .format(self.PARAM_DL_EARFCN, channel))
-            self.dl_channel = channel
-        else:
-            self.dl_channel = parameters[self.PARAM_DL_EARFCN]
-
-        # Set TDD-only configs
-        if self.get_duplex_mode() == lte_sim.DuplexMode.TDD:
-
-            # Sub-frame DL/UL config
-            if self.PARAM_FRAME_CONFIG not in parameters:
-                raise ValueError("When a TDD band is selected the frame "
-                                 "structure has to be indicated with the '{}' "
-                                 "key with a value from 0 to 6.".format(
-                                     self.PARAM_FRAME_CONFIG))
-
-            self.dlul_config = int(parameters[self.PARAM_FRAME_CONFIG])
-
-            # Special Sub-Frame configuration
-            if self.PARAM_SSF not in parameters:
-                self.log.warning(
-                    'The {} parameter was not provided. Setting '
-                    'Special Sub-Frame config to 6 by default.'.format(
-                        self.PARAM_SSF))
-                self.ssf_config = 6
-            else:
-                self.ssf_config = int(parameters[self.PARAM_SSF])
-
-        # Setup bandwidth
-        if self.PARAM_BW not in parameters:
-            raise ValueError(
-                "The config dictionary must include parameter {} with an "
-                "int value (to indicate 1.4 MHz use 14).".format(
-                    self.PARAM_BW))
-
-        bw = float(parameters[self.PARAM_BW])
-
-        if abs(bw - 14) < 0.00000000001:
-            bw = 1.4
-
-        self.bandwidth = bw
-
-        # Setup mimo mode
-        if self.PARAM_MIMO not in parameters:
-            raise ValueError(
-                "The config dictionary must include parameter '{}' with the "
-                "mimo mode.".format(self.PARAM_MIMO))
-
-        for mimo_mode in lte_sim.MimoMode:
-            if parameters[self.PARAM_MIMO] == mimo_mode.value:
-                self.mimo_mode = mimo_mode
-                break
-        else:
-            raise ValueError("The value of {} must be one of the following:"
-                             "1x1, 2x2 or 4x4.".format(self.PARAM_MIMO))
-
-        # Setup transmission mode
-        if self.PARAM_TM not in parameters:
-            raise ValueError(
-                "The config dictionary must include key {} with an "
-                "int value from 1 to 4 indicating transmission mode.".format(
-                    self.PARAM_TM))
-
-        for tm in lte_sim.TransmissionMode:
-            if parameters[self.PARAM_TM] == tm.value[2:]:
-                self.transmission_mode = tm
-                break
-        else:
-            raise ValueError(
-                "The {} key must have one of the following values:"
-                "1, 2, 3, 4, 7, 8 or 9.".format(self.PARAM_TM))
-
-        # Setup scheduling mode
-        if self.PARAM_SCHEDULING not in parameters:
-            self.scheduling_mode = lte_sim.SchedulingMode.STATIC
-            self.log.warning(
-                "The test config does not include the '{}' key. Setting to "
-                "static by default.".format(self.PARAM_SCHEDULING))
-        elif parameters[
-                self.PARAM_SCHEDULING] == self.PARAM_SCHEDULING_DYNAMIC:
-            self.scheduling_mode = lte_sim.SchedulingMode.DYNAMIC
-        elif parameters[self.PARAM_SCHEDULING] == self.PARAM_SCHEDULING_STATIC:
-            self.scheduling_mode = lte_sim.SchedulingMode.STATIC
-        else:
-            raise ValueError("Key '{}' must have a value of "
-                             "'dynamic' or 'static'.".format(
-                                 self.PARAM_SCHEDULING))
-
-        if self.scheduling_mode == lte_sim.SchedulingMode.STATIC:
-
-            if self.PARAM_PADDING not in parameters:
-                self.log.warning(
-                    "The '{}' parameter was not set. Enabling MAC padding by "
-                    "default.".format(self.PARAM_PADDING))
-                self.mac_padding = True
-            else:
-                self.mac_padding = parameters[self.PARAM_PADDING]
-
-            if self.PARAM_PATTERN not in parameters:
-                self.log.warning(
-                    "The '{}' parameter was not set, using 100% RBs for both "
-                    "DL and UL. To set the percentages of total RBs include "
-                    "the '{}' key with a list of two ints indicating downlink "
-                    "and uplink percentages.".format(self.PARAM_PATTERN,
-                                                     self.PARAM_PATTERN))
-                dl_pattern = 100
-                ul_pattern = 100
-            else:
-                dl_pattern = int(parameters[self.PARAM_PATTERN][0])
-                ul_pattern = int(parameters[self.PARAM_PATTERN][1])
-
-            if not (0 <= dl_pattern <= 100 and 0 <= ul_pattern <= 100):
-                raise ValueError(
-                    "The scheduling pattern parameters need to be two "
-                    "positive numbers between 0 and 100.")
-
-            self.dl_rbs, self.ul_rbs = (self.allocation_percentages_to_rbs(
-                dl_pattern, ul_pattern))
-
-            # Check if 256 QAM is enabled for DL MCS
-            if self.PARAM_DL_256_QAM_ENABLED not in parameters:
-                self.log.warning("The key '{}' is not set in the test config. "
-                                 "Setting to false by default.".format(
-                                     self.PARAM_DL_256_QAM_ENABLED))
-
-            self.dl_256_qam_enabled = parameters.get(
-                self.PARAM_DL_256_QAM_ENABLED, False)
-
-            # Look for a DL MCS configuration in the test parameters. If it is
-            # not present, use a default value.
-            if self.PARAM_DL_MCS in parameters:
-                self.dl_mcs = int(parameters[self.PARAM_DL_MCS])
-            else:
-                self.log.warning(
-                    'The test config does not include the {} key. Setting '
-                    'to the max value by default'.format(self.PARAM_DL_MCS))
-                if self.dl_256_qam_enabled and self.bandwidth == 1.4:
-                    self.dl_mcs = 26
-                elif (not self.dl_256_qam_enabled and self.mac_padding
-                      and self.bandwidth != 1.4):
-                    self.dl_mcs = 28
-                else:
-                    self.dl_mcs = 27
-
-            # Check if 64 QAM is enabled for UL MCS
-            if self.PARAM_UL_64_QAM_ENABLED not in parameters:
-                self.log.warning("The key '{}' is not set in the config file. "
-                                 "Setting to false by default.".format(
-                                     self.PARAM_UL_64_QAM_ENABLED))
-
-            self.ul_64_qam_enabled = parameters.get(
-                self.PARAM_UL_64_QAM_ENABLED, False)
-
-            # Look for an UL MCS configuration in the test parameters. If it is
-            # not present, use a default value.
-            if self.PARAM_UL_MCS in parameters:
-                self.ul_mcs = int(parameters[self.PARAM_UL_MCS])
-            else:
-                self.log.warning(
-                    'The test config does not include the {} key. Setting '
-                    'to the max value by default'.format(self.PARAM_UL_MCS))
-                if self.ul_64_qam_enabled:
-                    self.ul_mcs = 28
-                else:
-                    self.ul_mcs = 23
-
-        # Configure the simulation for DRX mode
-        if self.PARAM_DRX in parameters and len(
-                parameters[self.PARAM_DRX]) == 5:
-            self.drx_connected_mode = True
-            self.drx_on_duration_timer = parameters[self.PARAM_DRX][0]
-            self.drx_inactivity_timer = parameters[self.PARAM_DRX][1]
-            self.drx_retransmission_timer = parameters[self.PARAM_DRX][2]
-            self.drx_long_cycle = parameters[self.PARAM_DRX][3]
-            try:
-                long_cycle = int(parameters[self.PARAM_DRX][3])
-                long_cycle_offset = int(parameters[self.PARAM_DRX][4])
-                if long_cycle_offset in range(0, long_cycle):
-                    self.drx_long_cycle_offset = long_cycle_offset
-                else:
-                    self.log.error(
-                        ("The cDRX long cycle offset must be in the "
-                         "range 0 to (long cycle  - 1). Setting "
-                         "long cycle offset to 0"))
-                    self.drx_long_cycle_offset = 0
-
-            except ValueError:
-                self.log.error(("cDRX long cycle and long cycle offset "
-                                "must be integers. Disabling cDRX mode."))
-                self.drx_connected_mode = False
-        else:
-            self.log.warning(
-                ("DRX mode was not configured properly. "
-                 "Please provide a list with the following values: "
-                 "1) DRX on duration timer "
-                 "2) Inactivity timer "
-                 "3) Retransmission timer "
-                 "4) Long DRX cycle duration "
-                 "5) Long DRX cycle offset "
-                 "Example: [2, 6, 16, 20, 0]."))
-
-        # Channel Control Indicator
-        if self.PARAM_CFI not in parameters:
-            self.log.warning('The {} parameter was not provided. Setting '
-                             'CFI to BESTEFFORT.'.format(self.PARAM_CFI))
-            self.cfi = 'BESTEFFORT'
-        else:
-            self.cfi = parameters[self.PARAM_CFI]
-
-        # PHICH group size
-        if self.PARAM_PHICH not in parameters:
-            self.log.warning('The {} parameter was not provided. Setting '
-                             'PHICH group size to 1 by default.'.format(
-                                 self.PARAM_PHICH))
-            self.phich = '1'
-        else:
-            if parameters[self.PARAM_PHICH] == '16':
-                self.phich = '1/6'
-            elif parameters[self.PARAM_PHICH] == '12':
-                self.phich = '1/2'
-            elif parameters[self.PARAM_PHICH] in ['1/6', '1/2', '1', '2']:
-                self.phich = parameters[self.PARAM_PHICH]
-            else:
-                raise ValueError('The {} parameter can only be followed by 1,'
-                                 '2, 1/2 (or 12) and 1/6 (or 16).'.format(
-                                     self.PARAM_PHICH))
-
-        # Paging cycle duration
-        if self.PARAM_PAGING not in parameters:
-            self.log.warning('The {} parameter was not provided. Setting '
-                             'paging cycle duration to 1280 ms by '
-                             'default.'.format(self.PARAM_PAGING))
-            self.paging_cycle = 1280
-        else:
-            try:
-                self.paging_cycle = int(parameters[self.PARAM_PAGING])
-            except ValueError:
-                raise ValueError(
-                    'The {} key has to be followed by the paging cycle '
-                    'duration in milliseconds.'.format(self.PARAM_PAGING))
-
-    def get_duplex_mode(self):
-        """ Determines if the cell uses FDD or TDD duplex mode
-
-        Returns:
-          an variable of class DuplexMode indicating if band is FDD or TDD
-        """
-        if 33 <= int(self.band) <= 46:
-            return lte_sim.DuplexMode.TDD
-        else:
-            return lte_sim.DuplexMode.FDD
-
-    def allocation_percentages_to_rbs(self, dl, ul):
-        """ Converts usage percentages to number of DL/UL RBs
-
-        Because not any number of DL/UL RBs can be obtained for a certain
-        bandwidth, this function calculates the number of RBs that most
-        closely matches the desired DL/UL percentages.
-
-        Args:
-            dl: desired percentage of downlink RBs
-            ul: desired percentage of uplink RBs
-        Returns:
-            a tuple indicating the number of downlink and uplink RBs
-        """
-
-        # Validate the arguments
-        if (not 0 <= dl <= 100) or (not 0 <= ul <= 100):
-            raise ValueError("The percentage of DL and UL RBs have to be two "
-                             "positive between 0 and 100.")
-
-        # Get min and max values from tables
-        max_rbs = lte_sim.TOTAL_RBS_DICTIONARY[self.bandwidth]
-        min_dl_rbs = lte_sim.MIN_DL_RBS_DICTIONARY[self.bandwidth]
-        min_ul_rbs = lte_sim.MIN_UL_RBS_DICTIONARY[self.bandwidth]
-
-        def percentage_to_amount(min_val, max_val, percentage):
-            """ Returns the integer between min_val and max_val that is closest
-            to percentage/100*max_val
-            """
-
-            # Calculate the value that corresponds to the required percentage.
-            closest_int = round(max_val * percentage / 100)
-            # Cannot be less than min_val
-            closest_int = max(closest_int, min_val)
-            # RBs cannot be more than max_rbs
-            closest_int = min(closest_int, max_val)
-
-            return closest_int
-
-        # Calculate the number of DL RBs
-
-        # Get the number of DL RBs that corresponds to
-        #  the required percentage.
-        desired_dl_rbs = percentage_to_amount(min_val=min_dl_rbs,
-                                              max_val=max_rbs,
-                                              percentage=dl)
-
-        if self.transmission_mode == lte_sim.TransmissionMode.TM3 or \
-                self.transmission_mode == lte_sim.TransmissionMode.TM4:
-
-            # For TM3 and TM4 the number of DL RBs needs to be max_rbs or a
-            # multiple of the RBG size
-
-            if desired_dl_rbs == max_rbs:
-                dl_rbs = max_rbs
-            else:
-                dl_rbs = (math.ceil(
-                    desired_dl_rbs / lte_sim.RBG_DICTIONARY[self.bandwidth]) *
-                          lte_sim.RBG_DICTIONARY[self.bandwidth])
-
-        else:
-            # The other TMs allow any number of RBs between 1 and max_rbs
-            dl_rbs = desired_dl_rbs
-
-        # Calculate the number of UL RBs
-
-        # Get the number of UL RBs that corresponds
-        # to the required percentage
-        desired_ul_rbs = percentage_to_amount(min_val=min_ul_rbs,
-                                              max_val=max_rbs,
-                                              percentage=ul)
-
-        # Create a list of all possible UL RBs assignment
-        # The standard allows any number that can be written as
-        # 2**a * 3**b * 5**c for any combination of a, b and c.
-
-        def pow_range(max_value, base):
-            """ Returns a range of all possible powers of base under
-              the given max_value.
-          """
-            return range(int(math.ceil(math.log(max_value, base))))
-
-        possible_ul_rbs = [
-            2 ** a * 3 ** b * 5 ** c for a in pow_range(max_rbs, 2)
-            for b in pow_range(max_rbs, 3)
-            for c in pow_range(max_rbs, 5)
-            if 2 ** a * 3 ** b * 5 ** c <= max_rbs]  # yapf: disable
-
-        # Find the value in the list that is closest to desired_ul_rbs
-        differences = [abs(rbs - desired_ul_rbs) for rbs in possible_ul_rbs]
-        ul_rbs = possible_ul_rbs[differences.index(min(differences))]
-
-        # Report what are the obtained RB percentages
-        self.log.info("Requested a {}% / {}% RB allocation. Closest possible "
-                      "percentages are {}% / {}%.".format(
-                          dl, ul, round(100 * dl_rbs / max_rbs),
-                          round(100 * ul_rbs / max_rbs)))
-
-        return dl_rbs, ul_rbs
diff --git a/src/antlion/controllers/cellular_lib/LteImsSimulation.py b/src/antlion/controllers/cellular_lib/LteImsSimulation.py
deleted file mode 100644
index 9f74714..0000000
--- a/src/antlion/controllers/cellular_lib/LteImsSimulation.py
+++ /dev/null
@@ -1,77 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import time
-
-import antlion.controllers.anritsu_lib.md8475a as md8475a
-from antlion.controllers.cellular_lib.LteSimulation import LteSimulation
-
-# Time to wait for Anritsu's IMS CSCF state change
-MAX_WAIT_TIME_IMS_CSCF_STATE = 30
-# default ims virtual network id for Anritsu ims call test.
-DEFAULT_IMS_VIRTUAL_NETWORK_ID = 1
-
-
-class LteImsSimulation(LteSimulation):
-
-    LTE_BASIC_SIM_FILE = 'VoLTE_ATT_Sim.wnssp'
-    LTE_BASIC_CELL_FILE = 'VoLTE_ATT_Cell.wnscp'
-
-    def attach(self):
-        """ After attaching verify the UE has registered with the IMS server.
-
-        Returns:
-            True if the phone was able to attach, False if not.
-        """
-
-        if not super().attach():
-            return False
-
-        # The phone should have registered with the IMS server before attaching.
-        # Make sure the IMS registration was successful by verifying the CSCF
-        # status is SIP IDLE.
-        if not _wait_for_ims_cscf_status(
-                self.log,
-                self.simulator.anritsu,
-                DEFAULT_IMS_VIRTUAL_NETWORK_ID,
-                md8475a.ImsCscfStatus.SIPIDLE.value):
-            self.log.error('UE failed to register with the IMS server.')
-            return False
-
-        return True
-
-
-def _wait_for_ims_cscf_status(log,
-                              anritsu_handle,
-                              virtual_network_id,
-                              status,
-                              timeout=MAX_WAIT_TIME_IMS_CSCF_STATE):
-    """ Wait for IMS CSCF to be in expected state.
-
-    Args:
-        log: log object
-        anritsu_handle: anritsu object
-        virtual_network_id: virtual network id to be monitored
-        status: expected status
-        timeout: wait time
-    """
-    sleep_interval = 1
-    wait_time = timeout
-    while wait_time > 0:
-        if status == anritsu_handle.get_ims_cscf_status(virtual_network_id):
-            return True
-        time.sleep(sleep_interval)
-        wait_time = wait_time - sleep_interval
-    return False
diff --git a/src/antlion/controllers/cellular_lib/LteSimulation.py b/src/antlion/controllers/cellular_lib/LteSimulation.py
deleted file mode 100644
index b811a90..0000000
--- a/src/antlion/controllers/cellular_lib/LteSimulation.py
+++ /dev/null
@@ -1,923 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import time
-from enum import Enum
-
-from antlion.controllers.cellular_lib.BaseSimulation import BaseSimulation
-from antlion.controllers.cellular_lib.LteCellConfig import LteCellConfig
-from antlion.controllers.cellular_lib.NrCellConfig import NrCellConfig
-from antlion.controllers.cellular_lib import BaseCellularDut
-
-
-class TransmissionMode(Enum):
-    """ Transmission modes for LTE (e.g., TM1, TM4, ...) """
-    TM1 = "TM1"
-    TM2 = "TM2"
-    TM3 = "TM3"
-    TM4 = "TM4"
-    TM7 = "TM7"
-    TM8 = "TM8"
-    TM9 = "TM9"
-
-
-class MimoMode(Enum):
-    """ Mimo modes """
-    MIMO_1x1 = "1x1"
-    MIMO_2x2 = "2x2"
-    MIMO_4x4 = "4x4"
-
-
-class SchedulingMode(Enum):
-    """ Traffic scheduling modes (e.g., STATIC, DYNAMIC) """
-    DYNAMIC = "DYNAMIC"
-    STATIC = "STATIC"
-
-
-class DuplexMode(Enum):
-    """ DL/UL Duplex mode """
-    FDD = "FDD"
-    TDD = "TDD"
-
-
-class ModulationType(Enum):
-    """DL/UL Modulation order."""
-    QPSK = 'QPSK'
-    Q16 = '16QAM'
-    Q64 = '64QAM'
-    Q256 = '256QAM'
-
-
-# Bandwidth [MHz] to RB group size
-RBG_DICTIONARY = {20: 4, 15: 4, 10: 3, 5: 2, 3: 2, 1.4: 1}
-
-# Bandwidth [MHz] to total RBs mapping
-TOTAL_RBS_DICTIONARY = {20: 100, 15: 75, 10: 50, 5: 25, 3: 15, 1.4: 6}
-
-# Bandwidth [MHz] to minimum number of DL RBs that can be assigned to a UE
-MIN_DL_RBS_DICTIONARY = {20: 16, 15: 12, 10: 9, 5: 4, 3: 4, 1.4: 2}
-
-# Bandwidth [MHz] to minimum number of UL RBs that can be assigned to a UE
-MIN_UL_RBS_DICTIONARY = {20: 8, 15: 6, 10: 4, 5: 2, 3: 2, 1.4: 1}
-
-
-class LteSimulation(BaseSimulation):
-    """ Single-carrier LTE simulation. """
-    # Test config keywords
-    KEY_FREQ_BANDS = "freq_bands"
-
-    # Cell param keywords
-    PARAM_RRC_STATUS_CHANGE_TIMER = "rrcstatuschangetimer"
-
-    # Units in which signal level is defined in DOWNLINK_SIGNAL_LEVEL_DICTIONARY
-    DOWNLINK_SIGNAL_LEVEL_UNITS = "RSRP"
-
-    # RSRP signal levels thresholds (as reported by Android) in dBm/15KHz.
-    # Excellent is set to -75 since callbox B Tx power is limited to -30 dBm
-    DOWNLINK_SIGNAL_LEVEL_DICTIONARY = {
-        'excellent': -75,
-        'high': -110,
-        'medium': -115,
-        'weak': -120,
-        'disconnected': -170
-    }
-
-    # Transmitted output power for the phone (dBm)
-    UPLINK_SIGNAL_LEVEL_DICTIONARY = {
-        'max': 27,
-        'high': 13,
-        'medium': 3,
-        'low': -20
-    }
-
-    # Allowed bandwidth for each band.
-    allowed_bandwidth_dictionary = {
-        1: [5, 10, 15, 20],
-        2: [1.4, 3, 5, 10, 15, 20],
-        3: [1.4, 3, 5, 10, 15, 20],
-        4: [1.4, 3, 5, 10, 15, 20],
-        5: [1.4, 3, 5, 10],
-        7: [5, 10, 15, 20],
-        8: [1.4, 3, 5, 10],
-        10: [5, 10, 15, 20],
-        11: [5, 10],
-        12: [1.4, 3, 5, 10],
-        13: [5, 10],
-        14: [5, 10],
-        17: [5, 10],
-        18: [5, 10, 15],
-        19: [5, 10, 15],
-        20: [5, 10, 15, 20],
-        21: [5, 10, 15],
-        22: [5, 10, 15, 20],
-        24: [5, 10],
-        25: [1.4, 3, 5, 10, 15, 20],
-        26: [1.4, 3, 5, 10, 15],
-        27: [1.4, 3, 5, 10],
-        28: [3, 5, 10, 15, 20],
-        29: [3, 5, 10],
-        30: [5, 10],
-        31: [1.4, 3, 5],
-        32: [5, 10, 15, 20],
-        33: [5, 10, 15, 20],
-        34: [5, 10, 15],
-        35: [1.4, 3, 5, 10, 15, 20],
-        36: [1.4, 3, 5, 10, 15, 20],
-        37: [5, 10, 15, 20],
-        38: [20],
-        39: [5, 10, 15, 20],
-        40: [5, 10, 15, 20],
-        41: [5, 10, 15, 20],
-        42: [5, 10, 15, 20],
-        43: [5, 10, 15, 20],
-        44: [3, 5, 10, 15, 20],
-        45: [5, 10, 15, 20],
-        46: [10, 20],
-        47: [10, 20],
-        48: [5, 10, 15, 20],
-        49: [10, 20],
-        50: [3, 5, 10, 15, 20],
-        51: [3, 5],
-        52: [5, 10, 15, 20],
-        65: [5, 10, 15, 20],
-        66: [1.4, 3, 5, 10, 15, 20],
-        67: [5, 10, 15, 20],
-        68: [5, 10, 15],
-        69: [5],
-        70: [5, 10, 15],
-        71: [5, 10, 15, 20],
-        72: [1.4, 3, 5],
-        73: [1.4, 3, 5],
-        74: [1.4, 3, 5, 10, 15, 20],
-        75: [5, 10, 15, 20],
-        76: [5],
-        85: [5, 10],
-        252: [20],
-        255: [20]
-    }
-
-    # Dictionary of lower DL channel number bound for each band.
-    LOWEST_DL_CN_DICTIONARY = {
-        1: 0,
-        2: 600,
-        3: 1200,
-        4: 1950,
-        5: 2400,
-        6: 2650,
-        7: 2750,
-        8: 3450,
-        9: 3800,
-        10: 4150,
-        11: 4750,
-        12: 5010,
-        13: 5180,
-        14: 5280,
-        17: 5730,
-        18: 5850,
-        19: 6000,
-        20: 6150,
-        21: 6450,
-        22: 6600,
-        23: 7500,
-        24: 7700,
-        25: 8040,
-        26: 8690,
-        27: 9040,
-        28: 9210,
-        29: 9660,
-        30: 9770,
-        31: 9870,
-        32: 9920,
-        33: 36000,
-        34: 36200,
-        35: 36350,
-        36: 36950,
-        37: 37550,
-        38: 37750,
-        39: 38250,
-        40: 38650,
-        41: 39650,
-        42: 41590,
-        43: 45590,
-        66: 66436,
-        67: 67336
-    }
-
-    # Peak throughput lookup tables for each TDD subframe
-    # configuration and bandwidth
-    # yapf: disable
-    tdd_config4_tput_lut = {
-        0: {
-            5: {'DL': 3.82, 'UL': 2.63},
-            10: {'DL': 11.31,'UL': 9.03},
-            15: {'DL': 16.9, 'UL': 20.62},
-            20: {'DL': 22.88, 'UL': 28.43}
-        },
-        1: {
-            5: {'DL': 6.13, 'UL': 4.08},
-            10: {'DL': 18.36, 'UL': 9.69},
-            15: {'DL': 28.62, 'UL': 14.21},
-            20: {'DL': 39.04, 'UL': 19.23}
-        },
-        2: {
-            5: {'DL': 5.68, 'UL': 2.30},
-            10: {'DL': 25.51, 'UL': 4.68},
-            15: {'DL': 39.3, 'UL': 7.13},
-            20: {'DL': 53.64, 'UL': 9.72}
-        },
-        3: {
-            5: {'DL': 8.26, 'UL': 3.45},
-            10: {'DL': 23.20, 'UL': 6.99},
-            15: {'DL': 35.35, 'UL': 10.75},
-            20: {'DL': 48.3, 'UL': 14.6}
-        },
-        4: {
-            5: {'DL': 6.16, 'UL': 2.30},
-            10: {'DL': 26.77, 'UL': 4.68},
-            15: {'DL': 40.7, 'UL': 7.18},
-            20: {'DL': 55.6, 'UL': 9.73}
-        },
-        5: {
-            5: {'DL': 6.91, 'UL': 1.12},
-            10: {'DL': 30.33, 'UL': 2.33},
-            15: {'DL': 46.04, 'UL': 3.54},
-            20: {'DL': 62.9, 'UL': 4.83}
-        },
-        6: {
-            5: {'DL': 6.13, 'UL': 4.13},
-            10: {'DL': 14.79, 'UL': 11.98},
-            15: {'DL': 23.28, 'UL': 17.46},
-            20: {'DL': 31.75, 'UL': 23.95}
-        }
-    }
-
-    tdd_config3_tput_lut = {
-        0: {
-            5: {'DL': 5.04, 'UL': 3.7},
-            10: {'DL': 15.11, 'UL': 17.56},
-            15: {'DL': 22.59, 'UL': 30.31},
-            20: {'DL': 30.41, 'UL': 41.61}
-        },
-        1: {
-            5: {'DL': 8.07, 'UL': 5.66},
-            10: {'DL': 24.58, 'UL': 13.66},
-            15: {'DL': 39.05, 'UL': 20.68},
-            20: {'DL': 51.59, 'UL': 28.76}
-        },
-        2: {
-            5: {'DL': 7.59, 'UL': 3.31},
-            10: {'DL': 34.08, 'UL': 6.93},
-            15: {'DL': 53.64, 'UL': 10.51},
-            20: {'DL': 70.55, 'UL': 14.41}
-        },
-        3: {
-            5: {'DL': 10.9, 'UL': 5.0},
-            10: {'DL': 30.99, 'UL': 10.25},
-            15: {'DL': 48.3, 'UL': 15.81},
-            20: {'DL': 63.24, 'UL': 21.65}
-        },
-        4: {
-            5: {'DL': 8.11, 'UL': 3.32},
-            10: {'DL': 35.74, 'UL': 6.95},
-            15: {'DL': 55.6, 'UL': 10.51},
-            20: {'DL': 72.72, 'UL': 14.41}
-        },
-        5: {
-            5: {'DL': 9.28, 'UL': 1.57},
-            10: {'DL': 40.49, 'UL': 3.44},
-            15: {'DL': 62.9, 'UL': 5.23},
-            20: {'DL': 82.21, 'UL': 7.15}
-        },
-        6: {
-            5: {'DL': 8.06, 'UL': 5.74},
-            10: {'DL': 19.82, 'UL': 17.51},
-            15: {'DL': 31.75, 'UL': 25.77},
-            20: {'DL': 42.12, 'UL': 34.91}
-        }
-    }
-
-    tdd_config2_tput_lut = {
-        0: {
-            5: {'DL': 3.11, 'UL': 2.55},
-            10: {'DL': 9.93, 'UL': 11.1},
-            15: {'DL': 13.9, 'UL': 21.51},
-            20: {'DL': 20.02, 'UL': 41.66}
-        },
-        1: {
-            5: {'DL': 5.33, 'UL': 4.27},
-            10: {'DL': 15.14, 'UL': 13.95},
-            15: {'DL': 33.84, 'UL': 19.73},
-            20: {'DL': 44.61, 'UL': 27.35}
-        },
-        2: {
-            5: {'DL': 6.87, 'UL': 3.32},
-            10: {'DL': 17.06, 'UL': 6.76},
-            15: {'DL': 49.63, 'UL': 10.5},
-            20: {'DL': 65.2, 'UL': 14.41}
-        },
-        3: {
-            5: {'DL': 5.41, 'UL': 4.17},
-            10: {'DL': 16.89, 'UL': 9.73},
-            15: {'DL': 44.29, 'UL': 15.7},
-            20: {'DL': 53.95, 'UL': 19.85}
-        },
-        4: {
-            5: {'DL': 8.7, 'UL': 3.32},
-            10: {'DL': 17.58, 'UL': 6.76},
-            15: {'DL': 51.08, 'UL': 10.47},
-            20: {'DL': 66.45, 'UL': 14.38}
-        },
-        5: {
-            5: {'DL': 9.46, 'UL': 1.55},
-            10: {'DL': 19.02, 'UL': 3.48},
-            15: {'DL': 58.89, 'UL': 5.23},
-            20: {'DL': 76.85, 'UL': 7.1}
-        },
-        6: {
-            5: {'DL': 4.74, 'UL': 3.9},
-            10: {'DL': 12.32, 'UL': 13.37},
-            15: {'DL': 27.74, 'UL': 25.02},
-            20: {'DL': 35.48, 'UL': 32.95}
-        }
-    }
-
-    tdd_config1_tput_lut = {
-        0: {
-            5: {'DL': 4.25, 'UL': 3.35},
-            10: {'DL': 8.38, 'UL': 7.22},
-            15: {'DL': 12.41, 'UL': 13.91},
-            20: {'DL': 16.27, 'UL': 24.09}
-        },
-        1: {
-            5: {'DL': 7.28, 'UL': 4.61},
-            10: {'DL': 14.73, 'UL': 9.69},
-            15: {'DL': 21.91, 'UL': 13.86},
-            20: {'DL': 27.63, 'UL': 17.18}
-        },
-        2: {
-            5: {'DL': 10.37, 'UL': 2.27},
-            10: {'DL': 20.92, 'UL': 4.66},
-            15: {'DL': 31.01, 'UL': 7.04},
-            20: {'DL': 42.03, 'UL': 9.75}
-        },
-        3: {
-            5: {'DL': 9.25, 'UL': 3.44},
-            10: {'DL': 18.38, 'UL': 6.95},
-            15: {'DL': 27.59, 'UL': 10.62},
-            20: {'DL': 34.85, 'UL': 13.45}
-        },
-        4: {
-            5: {'DL': 10.71, 'UL': 2.26},
-            10: {'DL': 21.54, 'UL': 4.67},
-            15: {'DL': 31.91, 'UL': 7.2},
-            20: {'DL': 43.35, 'UL': 9.74}
-        },
-        5: {
-            5: {'DL': 12.34, 'UL': 1.08},
-            10: {'DL': 24.78, 'UL': 2.34},
-            15: {'DL': 36.68, 'UL': 3.57},
-            20: {'DL': 49.84, 'UL': 4.81}
-        },
-        6: {
-            5: {'DL': 5.76, 'UL': 4.41},
-            10: {'DL': 11.68, 'UL': 9.7},
-            15: {'DL': 17.34, 'UL': 17.95},
-            20: {'DL': 23.5, 'UL': 23.42}
-        }
-    }
-    # yapf: enable
-
-    # Peak throughput lookup table dictionary
-    tdd_config_tput_lut_dict = {
-        'TDD_CONFIG1':
-        tdd_config1_tput_lut,  # DL 256QAM, UL 64QAM & MAC padding turned OFF
-        'TDD_CONFIG2':
-        tdd_config2_tput_lut,  # DL 256QAM, UL 64 QAM ON & MAC padding OFF
-        'TDD_CONFIG3':
-        tdd_config3_tput_lut,  # DL 256QAM, UL 64QAM & MAC padding ON
-        'TDD_CONFIG4':
-        tdd_config4_tput_lut  # DL 256QAM, UL 64 QAM OFF & MAC padding ON
-    }
-
-    def __init__(
-        self, simulator, log, dut, test_config, calibration_table,
-        nr_mode=None):
-        """ Initializes the simulator for a single-carrier LTE simulation.
-
-        Args:
-            simulator: a cellular simulator controller
-            log: a logger handle
-            dut: a device handler implementing BaseCellularDut
-            test_config: test configuration obtained from the config file
-            calibration_table: a dictionary containing path losses for
-                different bands.
-
-        """
-
-        super().__init__(
-            simulator, log, dut, test_config, calibration_table, nr_mode)
-
-        self.num_carriers = None
-
-        # Force device to LTE only so that it connects faster
-        try:
-            if self.nr_mode and 'nr' == self.nr_mode:
-                self.dut.set_preferred_network_type(
-                    BaseCellularDut.PreferredNetworkType.LTE_NR)
-            else:
-                self.dut.set_preferred_network_type(
-                    BaseCellularDut.PreferredNetworkType.LTE_ONLY)
-        except Exception as e:
-            # If this fails the test should be able to run anyways, even if it
-            # takes longer to find the cell.
-            self.log.warning('Setting preferred RAT failed: ' + str(e))
-
-        # Get LTE CA frequency bands setting from the test configuration
-        if self.KEY_FREQ_BANDS not in test_config:
-            self.log.warning("The key '{}' is not set in the config file. "
-                             "Setting to null by default.".format(
-                                 self.KEY_FREQ_BANDS))
-
-        self.freq_bands = test_config.get(self.KEY_FREQ_BANDS, True)
-
-    def setup_simulator(self):
-        """ Do initial configuration in the simulator. """
-        if self.nr_mode and 'nr' == self.nr_mode:
-            self.log.info('Initializes the callbox to Nr Nsa scenario')
-            self.simulator.setup_nr_nsa_scenario()
-        else:
-            self.log.info('Initializes the callbox to LTE scenario')
-            self.simulator.setup_lte_scenario()
-
-    def configure(self, parameters):
-        """ Configures simulation using a dictionary of parameters.
-
-        Processes LTE configuration parameters.
-
-        Args:
-            parameters: a configuration dictionary if there is only one carrier,
-                a list if there are multiple cells.
-        """
-        # If there is a single item, put in a list
-        if not isinstance(parameters, list):
-            parameters = [parameters]
-
-        # Pass only PCC configs to BaseSimulation
-        super().configure(parameters[0])
-
-        new_cell_list = []
-        for cell in parameters:
-            if LteCellConfig.PARAM_BAND not in cell:
-                raise ValueError(
-                    "The configuration dictionary must include a key '{}' with "
-                    "the required band number.".format(
-                        LteCellConfig.PARAM_BAND))
-
-            band = cell[LteCellConfig.PARAM_BAND]
-
-            if isinstance(band, str) and not band.isdigit():
-                # If band starts with n then it is an NR band
-                if band[0] == 'n' and band[1:].isdigit():
-                    # If the remaining string is only the band number, add
-                    # the cell and continue
-                    new_cell_list.append(cell)
-                    continue
-
-                ca_class = band[-1].upper()
-                band_num = band[:-1]
-
-                if ca_class in ['A', 'C']:
-                    # Remove the CA class label and add the cell
-                    cell[LteCellConfig.PARAM_BAND] = band_num
-                    new_cell_list.append(cell)
-                elif ca_class == 'B':
-                    raise RuntimeError('Class B LTE CA not supported.')
-                else:
-                    raise ValueError('Invalid band value: ' + band)
-
-                # Class C means that there are two contiguous carriers
-                if ca_class == 'C':
-                    new_cell_list.append(dict(cell))
-                    bw = int(cell[LteCellConfig.PARAM_BW])
-                    dl_earfcn = LteCellConfig.PARAM_DL_EARFCN
-                    new_cell_list[-1][dl_earfcn] = self.LOWEST_DL_CN_DICTIONARY[
-                        int(band_num)] + bw * 10 - 2
-            else:
-                # The band is just a number, so just add it to the list
-                new_cell_list.append(cell)
-
-        # Logs new_cell_list for debug
-        self.log.info('new cell list: {}'.format(new_cell_list))
-
-        self.simulator.set_band_combination(
-            [c[LteCellConfig.PARAM_BAND] for c in new_cell_list])
-
-        self.num_carriers = len(new_cell_list)
-
-        # Setup the base stations with the obtain configuration
-        self.cell_configs = []
-        for i in range(self.num_carriers):
-            band = new_cell_list[i][LteCellConfig.PARAM_BAND]
-            if isinstance(band, str) and band[0] == 'n':
-                self.cell_configs.append(NrCellConfig(self.log))
-            else:
-                self.cell_configs.append(LteCellConfig(self.log))
-            self.cell_configs[i].configure(new_cell_list[i])
-            self.simulator.configure_bts(self.cell_configs[i], i)
-
-        # Now that the band is set, calibrate the link if necessary
-        self.load_pathloss_if_required()
-
-        # This shouldn't be a cell parameter but instead a simulation config
-        # Setup LTE RRC status change function and timer for LTE idle test case
-        if self.PARAM_RRC_STATUS_CHANGE_TIMER not in parameters[0]:
-            self.log.info(
-                "The test config does not include the '{}' key. Disabled "
-                "by default.".format(self.PARAM_RRC_STATUS_CHANGE_TIMER))
-            self.simulator.set_lte_rrc_state_change_timer(False)
-        else:
-            timer = int(parameters[0][self.PARAM_RRC_STATUS_CHANGE_TIMER])
-            self.simulator.set_lte_rrc_state_change_timer(True, timer)
-            self.rrc_sc_timer = timer
-
-    def calibrated_downlink_rx_power(self, bts_config, rsrp):
-        """ LTE simulation overrides this method so that it can convert from
-        RSRP to total signal power transmitted from the basestation.
-
-        Args:
-            bts_config: the current configuration at the base station
-            rsrp: desired rsrp, contained in a key value pair
-        """
-
-        power = self.rsrp_to_signal_power(rsrp, bts_config)
-
-        self.log.info(
-            "Setting downlink signal level to {} RSRP ({} dBm)".format(
-                rsrp, power))
-
-        # Use parent method to calculate signal level
-        return super().calibrated_downlink_rx_power(bts_config, power)
-
-    def downlink_calibration(self, rat=None, power_units_conversion_func=None):
-        """ Computes downlink path loss and returns the calibration value.
-
-        See base class implementation for details.
-
-        Args:
-            rat: ignored, replaced by 'lteRsrp'
-            power_units_conversion_func: ignored, replaced by
-                self.rsrp_to_signal_power
-
-        Returns:
-            Downlink calibration value and measured DL power. Note that the
-            phone only reports RSRP of the primary chain
-        """
-
-        return super().downlink_calibration(
-            rat='lteDbm',
-            power_units_conversion_func=self.rsrp_to_signal_power)
-
-    def rsrp_to_signal_power(self, rsrp, bts_config):
-        """ Converts rsrp to total band signal power
-
-        RSRP is measured per subcarrier, so total band power needs to be
-        multiplied by the number of subcarriers being used.
-
-        Args:
-            rsrp: desired rsrp in dBm
-            bts_config: a base station configuration object
-        Returns:
-            Total band signal power in dBm
-        """
-
-        bandwidth = bts_config.bandwidth
-
-        if bandwidth == 100: # This assumes 273 RBs. TODO: b/229163022
-            power = rsrp + 35.15
-        elif bandwidth == 20:  # 100 RBs
-            power = rsrp + 30.79
-        elif bandwidth == 15:  # 75 RBs
-            power = rsrp + 29.54
-        elif bandwidth == 10:  # 50 RBs
-            power = rsrp + 27.78
-        elif bandwidth == 5:  # 25 RBs
-            power = rsrp + 24.77
-        elif bandwidth == 3:  # 15 RBs
-            power = rsrp + 22.55
-        elif bandwidth == 1.4:  # 6 RBs
-            power = rsrp + 18.57
-        else:
-            raise ValueError("Invalid bandwidth value.")
-
-        return power
-
-    def maximum_downlink_throughput(self):
-        """ Calculates maximum achievable downlink throughput in the current
-            simulation state.
-
-        Returns:
-            Maximum throughput in mbps.
-
-        """
-        return sum(
-            self.bts_maximum_downlink_throughtput(self.cell_configs[bts_index])
-            for bts_index in range(self.num_carriers))
-
-    def bts_maximum_downlink_throughtput(self, bts_config):
-        """ Calculates maximum achievable downlink throughput for a single
-        base station from its configuration object.
-
-        Args:
-            bts_config: a base station configuration object.
-
-        Returns:
-            Maximum throughput in mbps.
-
-        """
-        if bts_config.mimo_mode == MimoMode.MIMO_1x1:
-            streams = 1
-        elif bts_config.mimo_mode == MimoMode.MIMO_2x2:
-            streams = 2
-        elif bts_config.mimo_mode == MimoMode.MIMO_4x4:
-            streams = 4
-        else:
-            raise ValueError('Unable to calculate maximum downlink throughput '
-                             'because the MIMO mode has not been set.')
-
-        bandwidth = bts_config.bandwidth
-        rb_ratio = bts_config.dl_rbs / TOTAL_RBS_DICTIONARY[bandwidth]
-        mcs = bts_config.dl_mcs
-
-        max_rate_per_stream = None
-
-        tdd_subframe_config = bts_config.dlul_config
-        duplex_mode = bts_config.get_duplex_mode()
-
-        if duplex_mode == DuplexMode.TDD:
-            if bts_config.dl_256_qam_enabled:
-                if mcs == 27:
-                    if bts_config.mac_padding:
-                        max_rate_per_stream = self.tdd_config_tput_lut_dict[
-                            'TDD_CONFIG3'][tdd_subframe_config][bandwidth][
-                                'DL']
-                    else:
-                        max_rate_per_stream = self.tdd_config_tput_lut_dict[
-                            'TDD_CONFIG2'][tdd_subframe_config][bandwidth][
-                                'DL']
-            else:
-                if mcs == 28:
-                    if bts_config.mac_padding:
-                        max_rate_per_stream = self.tdd_config_tput_lut_dict[
-                            'TDD_CONFIG4'][tdd_subframe_config][bandwidth][
-                                'DL']
-                    else:
-                        max_rate_per_stream = self.tdd_config_tput_lut_dict[
-                            'TDD_CONFIG1'][tdd_subframe_config][bandwidth][
-                                'DL']
-
-        elif duplex_mode == DuplexMode.FDD:
-            if (not bts_config.dl_256_qam_enabled and bts_config.mac_padding
-                    and mcs == 28):
-                max_rate_per_stream = {
-                    3: 9.96,
-                    5: 17.0,
-                    10: 34.7,
-                    15: 52.7,
-                    20: 72.2
-                }.get(bandwidth, None)
-            if (not bts_config.dl_256_qam_enabled and bts_config.mac_padding
-                    and mcs == 27):
-                max_rate_per_stream = {
-                    1.4: 2.94,
-                }.get(bandwidth, None)
-            elif (not bts_config.dl_256_qam_enabled
-                  and not bts_config.mac_padding and mcs == 27):
-                max_rate_per_stream = {
-                    1.4: 2.87,
-                    3: 7.7,
-                    5: 14.4,
-                    10: 28.7,
-                    15: 42.3,
-                    20: 57.7
-                }.get(bandwidth, None)
-            elif bts_config.dl_256_qam_enabled and bts_config.mac_padding and mcs == 27:
-                max_rate_per_stream = {
-                    3: 13.2,
-                    5: 22.9,
-                    10: 46.3,
-                    15: 72.2,
-                    20: 93.9
-                }.get(bandwidth, None)
-            elif bts_config.dl_256_qam_enabled and bts_config.mac_padding and mcs == 26:
-                max_rate_per_stream = {
-                    1.4: 3.96,
-                }.get(bandwidth, None)
-            elif (bts_config.dl_256_qam_enabled and not bts_config.mac_padding
-                  and mcs == 27):
-                max_rate_per_stream = {
-                    3: 11.3,
-                    5: 19.8,
-                    10: 44.1,
-                    15: 68.1,
-                    20: 88.4
-                }.get(bandwidth, None)
-            elif (bts_config.dl_256_qam_enabled and not bts_config.mac_padding
-                  and mcs == 26):
-                max_rate_per_stream = {
-                    1.4: 3.96,
-                }.get(bandwidth, None)
-
-        if not max_rate_per_stream:
-            raise NotImplementedError(
-                "The calculation for MAC padding = {} "
-                "and mcs = {} is not implemented.".format(
-                    "FULLALLOCATION" if bts_config.mac_padding else "OFF",
-                    mcs))
-
-        return max_rate_per_stream * streams * rb_ratio
-
-    def maximum_uplink_throughput(self):
-        """ Calculates maximum achievable uplink throughput in the current
-            simulation state.
-
-        Returns:
-            Maximum throughput in mbps.
-
-        """
-
-        return self.bts_maximum_uplink_throughtput(self.cell_configs[0])
-
-    def bts_maximum_uplink_throughtput(self, bts_config):
-        """ Calculates maximum achievable uplink throughput for the selected
-        basestation from its configuration object.
-
-        Args:
-            bts_config: an LTE base station configuration object.
-
-        Returns:
-            Maximum throughput in mbps.
-
-        """
-
-        bandwidth = bts_config.bandwidth
-        rb_ratio = bts_config.ul_rbs / TOTAL_RBS_DICTIONARY[bandwidth]
-        mcs = bts_config.ul_mcs
-
-        max_rate_per_stream = None
-
-        tdd_subframe_config = bts_config.dlul_config
-        duplex_mode = bts_config.get_duplex_mode()
-
-        if duplex_mode == DuplexMode.TDD:
-            if bts_config.ul_64_qam_enabled:
-                if mcs == 28:
-                    if bts_config.mac_padding:
-                        max_rate_per_stream = self.tdd_config_tput_lut_dict[
-                            'TDD_CONFIG3'][tdd_subframe_config][bandwidth][
-                                'UL']
-                    else:
-                        max_rate_per_stream = self.tdd_config_tput_lut_dict[
-                            'TDD_CONFIG2'][tdd_subframe_config][bandwidth][
-                                'UL']
-            else:
-                if mcs == 23:
-                    if bts_config.mac_padding:
-                        max_rate_per_stream = self.tdd_config_tput_lut_dict[
-                            'TDD_CONFIG4'][tdd_subframe_config][bandwidth][
-                                'UL']
-                    else:
-                        max_rate_per_stream = self.tdd_config_tput_lut_dict[
-                            'TDD_CONFIG1'][tdd_subframe_config][bandwidth][
-                                'UL']
-
-        elif duplex_mode == DuplexMode.FDD:
-            if mcs == 23 and not bts_config.ul_64_qam_enabled:
-                max_rate_per_stream = {
-                    1.4: 2.85,
-                    3: 7.18,
-                    5: 12.1,
-                    10: 24.5,
-                    15: 36.5,
-                    20: 49.1
-                }.get(bandwidth, None)
-            elif mcs == 28 and bts_config.ul_64_qam_enabled:
-                max_rate_per_stream = {
-                    1.4: 4.2,
-                    3: 10.5,
-                    5: 17.2,
-                    10: 35.3,
-                    15: 53.0,
-                    20: 72.6
-                }.get(bandwidth, None)
-
-        if not max_rate_per_stream:
-            raise NotImplementedError(
-                "The calculation fir mcs = {} is not implemented.".format(
-                    "FULLALLOCATION" if bts_config.mac_padding else "OFF",
-                    mcs))
-
-        return max_rate_per_stream * rb_ratio
-
-    def calibrate(self, band):
-        """ Calculates UL and DL path loss if it wasn't done before
-
-        Before running the base class implementation, configure the base station
-        to only use one downlink antenna with maximum bandwidth.
-
-        Args:
-            band: the band that is currently being calibrated.
-        """
-
-        # Save initial values in a configuration object so they can be restored
-        restore_config = LteCellConfig(self.log)
-        restore_config.mimo_mode = self.cell_configs[0].mimo_mode
-        restore_config.transmission_mode = \
-            self.cell_configs[0].transmission_mode
-        restore_config.bandwidth = self.cell_configs[0].bandwidth
-
-        # Set up a temporary calibration configuration.
-        temporary_config = LteCellConfig(self.log)
-        temporary_config.mimo_mode = MimoMode.MIMO_1x1
-        temporary_config.transmission_mode = TransmissionMode.TM1
-        temporary_config.bandwidth = max(
-            self.allowed_bandwidth_dictionary[int(band)])
-        self.simulator.configure_bts(temporary_config)
-        self.cell_configs[0].incorporate(temporary_config)
-
-        super().calibrate(band)
-
-        # Restore values as they were before changing them for calibration.
-        self.simulator.configure_bts(restore_config)
-        self.cell_configs[0].incorporate(restore_config)
-
-    def start_traffic_for_calibration(self):
-        """ If MAC padding is enabled, there is no need to start IP traffic. """
-        if not self.cell_configs[0].mac_padding:
-            super().start_traffic_for_calibration()
-
-    def stop_traffic_for_calibration(self):
-        """ If MAC padding is enabled, IP traffic wasn't started. """
-        if not self.cell_configs[0].mac_padding:
-            super().stop_traffic_for_calibration()
-
-    def get_measured_ul_power(self, samples=5, wait_after_sample=3):
-        """ Calculates UL power using measurements from the callbox and the
-        calibration data.
-
-        Args:
-            samples: the numble of samples to average
-            wait_after_sample: time in seconds to wait in between samples
-
-        Returns:
-            the ul power at the UE antenna ports in dBs
-        """
-        ul_power_sum = 0
-        samples_left = samples
-
-        while samples_left > 0:
-            ul_power_sum += self.simulator.get_measured_pusch_power()
-            samples_left -= 1
-            time.sleep(wait_after_sample)
-
-        # Got enough samples, return calibrated average
-        if self.dl_path_loss:
-            return ul_power_sum / samples + self.ul_path_loss
-        else:
-            self.log.warning('No uplink calibration data. Returning '
-                             'uncalibrated values as measured by the '
-                             'callbox.')
-            return ul_power_sum / samples
-
-    def start(self):
-        """ Set the signal level for the secondary carriers, as the base class
-        implementation of this method will only set up downlink power for the
-        primary carrier component.
-
-        After that, attaches the secondary carriers."""
-
-        super().start()
-
-        if self.num_carriers > 1:
-            if self.sim_dl_power:
-                self.log.info('Setting DL power for secondary carriers.')
-
-                for bts_index in range(1, self.num_carriers):
-                    new_config = LteCellConfig(self.log)
-                    new_config.output_power = self.calibrated_downlink_rx_power(
-                        self.cell_configs[bts_index], self.sim_dl_power)
-                    self.simulator.configure_bts(new_config, bts_index)
-                    self.cell_configs[bts_index].incorporate(new_config)
-
-            self.simulator.lte_attach_secondary_carriers(self.freq_bands)
diff --git a/src/antlion/controllers/cellular_lib/NrCellConfig.py b/src/antlion/controllers/cellular_lib/NrCellConfig.py
deleted file mode 100644
index ff72dae..0000000
--- a/src/antlion/controllers/cellular_lib/NrCellConfig.py
+++ /dev/null
@@ -1,137 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import antlion.controllers.cellular_lib.BaseCellConfig as base_cell
-import antlion.controllers.cellular_lib.LteSimulation as lte_sim
-
-
-class NrCellConfig(base_cell.BaseCellConfig):
-    """ NR cell configuration class.
-
-    Attributes:
-        band: an integer indicating the required band number.
-        bandwidth: a integer indicating the required channel bandwidth
-    """
-
-    PARAM_BAND = 'band'
-    PARAM_BW = 'bw'
-    PARAM_DL_MCS = 'dlmcs'
-    PARAM_DL_RBS = 'dl_rbs'
-    PARAM_PADDING = 'mac_padding'
-    PARAM_MIMO = 'mimo'
-    PARAM_NRARFCN = 'nr_arfcn'
-    PARAM_SCHEDULING = "scheduling"
-    PARAM_SCHEDULING_DYNAMIC = "dynamic"
-    PARAM_SCHEDULING_STATIC = "static"
-    PARAM_UL_MCS = 'ulmcs'
-    PARAM_UL_RBS = 'ul_rbs'
-
-    def __init__(self, log):
-        """ Initialize the base station config by setting all its
-        parameters to None.
-        Args:
-            log: logger object.
-        """
-        super().__init__(log)
-        self.band = None
-        self.bandwidth = None
-        self.dl_rbs = None
-        self.ul_rbs = None
-        self.dl_mcs = None
-        self.ul_mcs = None
-        self.mac_padding = None
-        self.mimo_mode = None
-        self.nr_arfcn = None
-
-    def configure(self, parameters):
-        """ Configures an NR cell using a dictionary of parameters.
-
-        Args:
-            parameters: a configuration dictionary
-        """
-        if self.PARAM_BAND not in parameters:
-            raise ValueError(
-                "The configuration dictionary must include a key '{}' with "
-                "the required band number.".format(self.PARAM_BAND))
-        nr_band = parameters[self.PARAM_BAND]
-        if nr_band[0] == 'n':
-            nr_band = nr_band[1:]
-        self.band = nr_band
-
-        if self.PARAM_NRARFCN in parameters:
-            self.nr_arfcn = int(parameters[self.PARAM_NRARFCN])
-
-        if self.PARAM_BW not in parameters:
-            raise ValueError(
-                "The config dictionary must include parameter {} with an "
-                "int value (to indicate 1.4 MHz use 14).".format(
-                    self.PARAM_BW))
-        bw = float(parameters[self.PARAM_BW])
-
-        if abs(bw - 14) < 0.00000000001:
-            bw = 1.4
-
-        self.bandwidth = bw
-
-        # Setup mimo mode
-        if self.PARAM_MIMO not in parameters:
-            raise ValueError(
-                "The config dictionary must include parameter '{}' with the "
-                "mimo mode.".format(self.PARAM_MIMO))
-
-        for mimo_mode in lte_sim.MimoMode:
-            if parameters[self.PARAM_MIMO] == mimo_mode.value:
-                self.mimo_mode = mimo_mode
-                break
-        else:
-            raise ValueError("The value of {} must be one of the following:"
-                             "1x1, 2x2 or 4x4.".format(self.PARAM_MIMO))
-
-        if self.PARAM_SCHEDULING not in parameters:
-            self.scheduling_mode = lte_sim.SchedulingMode.STATIC
-            self.log.warning(
-                "The test config does not include the '{}' key. Setting to "
-                "static by default.".format(self.PARAM_SCHEDULING))
-        elif parameters[
-                self.PARAM_SCHEDULING] == self.PARAM_SCHEDULING_DYNAMIC:
-            self.scheduling_mode = lte_sim.SchedulingMode.DYNAMIC
-        elif parameters[self.PARAM_SCHEDULING] == self.PARAM_SCHEDULING_STATIC:
-            self.scheduling_mode = lte_sim.SchedulingMode.STATIC
-        else:
-            raise ValueError("Key '{}' must have a value of "
-                             "'dynamic' or 'static'.".format(
-                                 self.PARAM_SCHEDULING))
-
-        if self.scheduling_mode == lte_sim.SchedulingMode.STATIC:
-
-            if self.PARAM_PADDING not in parameters:
-                self.log.warning(
-                    "The '{}' parameter was not set. Enabling MAC padding by "
-                    "default.".format(self.PARAM_PADDING))
-                self.mac_padding = True
-
-            if self.PARAM_DL_MCS in parameters:
-                self.dl_mcs = int(parameters[self.PARAM_DL_MCS])
-
-            if self.PARAM_UL_MCS in parameters:
-                self.ul_mcs = int(parameters[self.PARAM_UL_MCS])
-
-            # Temproraily setting: set 273 for bandwidth of 100 MHz
-            self.dl_rbs = 273
-            self.ul_rbs = 273
-
-    def __str__(self):
-        return str(vars(self))
diff --git a/src/antlion/controllers/cellular_lib/OWNERS b/src/antlion/controllers/cellular_lib/OWNERS
deleted file mode 100644
index f88a96c..0000000
--- a/src/antlion/controllers/cellular_lib/OWNERS
+++ /dev/null
@@ -1,8 +0,0 @@
-iguarna@google.com
-chaoyangf@google.com
-yixiang@google.com
-codycaldwell@google.com
-
-per-file PresetSimulation.py = hmtuan@google.com
-per-file PresetSimulation.py = harjani@google.com
-per-file PresetSimulation.py = jethier@google.com
\ No newline at end of file
diff --git a/src/antlion/controllers/cellular_lib/PresetSimulation.py b/src/antlion/controllers/cellular_lib/PresetSimulation.py
deleted file mode 100644
index f1f649b..0000000
--- a/src/antlion/controllers/cellular_lib/PresetSimulation.py
+++ /dev/null
@@ -1,250 +0,0 @@
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.controllers.cellular_lib.BaseSimulation import BaseSimulation
-from antlion.controllers.cellular_lib import BaseCellularDut
-
-
-class PresetSimulation(BaseSimulation):
-    """5G preset simulation.
-
-    The simulation will be configed by importing SCPI config file
-    instead of individually set params.
-    """
-
-    # Keys to obtain settings from the test_config dictionary.
-    KEY_CELL_INFO = "cell_info"
-    KEY_SCPI_FILE_NAME = "scpi_file"
-
-    def __init__(self,
-                 simulator,
-                 log,
-                 dut,
-                 test_config,
-                 calibration_table,
-                 nr_mode=None):
-        """Initializes the simulator for 5G preset simulation.
-
-        Args:
-            simulator: a cellular simulator controller.
-            log: a logger handle.
-            dut: a device handler implementing BaseCellularDut.
-            test_config: test configuration obtained from the config file.
-            calibration_table: a dictionary containing path losses
-                for different bands.
-        """
-
-        super().__init__(simulator, log, dut, test_config, calibration_table,
-                         nr_mode)
-
-        # Set to KeySight APN
-        log.info('Configuring APN.')
-        self.dut.set_apn('Keysight', 'Keysight')
-        self.num_carriers = None
-
-        # Enable roaming on the phone
-        self.dut.toggle_data_roaming(True)
-
-        # Force device to LTE only so that it connects faster
-        try:
-            self.dut.set_preferred_network_type(
-                BaseCellularDut.PreferredNetworkType.NR_LTE)
-        except Exception as e:
-            # If this fails the test should be able to run anyways, even if it
-            # takes longer to find the cell.
-            self.log.warning('Setting preferred RAT failed: ' + str(e))
-
-    def setup_simulator(self):
-        """Do initial configuration in the simulator. """
-        self.log.info('This simulation does not require initial setup.')
-
-    def configure(self, parameters):
-        """Configures simulation by importing scpi file.
-
-        A pre-made SCPI file include all the essential configuration
-        for the simulation is imported by send SCPI import command
-        to the callbox.
-
-        Args:
-            parameters: a configuration dictionary which includes scpi file path
-                if there is only one carrier, a list if there are multiple cells.
-        """
-        scpi_file = parameters[0][self.KEY_SCPI_FILE_NAME]
-        cell_infos = parameters[0][self.KEY_CELL_INFO]
-
-        self.log.info('Configure test scenario with\n' +
-                      f' SCPI config file: {scpi_file}\n' +
-                      f' cell info: {cell_infos}')
-
-        self.simulator.import_configuration(scpi_file)
-        self.simulator.set_cell_info(cell_infos)
-
-    def start(self):
-        """Start simulation.
-
-        Waiting for the DUT to connect to the callbox.
-
-        Raise:
-            RuntimeError: simulation fail to start
-                due to unable to connect dut and cells.
-        """
-
-        try:
-            self.attach()
-        except Exception as exc:
-            raise RuntimeError('Simulation fail to start.') from exc
-
-    def attach(self):
-        """Attach UE to the callbox.
-
-        Toggle airplane mode on-off and wait for a specified timeout,
-        repeat until the UE connect to the callbox.
-
-        Raise:
-            RuntimeError: attaching fail
-                due to unable to connect dut and cells.
-        """
-        try:
-            self.simulator.wait_until_attached(self.dut, self.attach_timeout,
-                                               self.attach_retries)
-        except Exception as exc:
-            raise RuntimeError('Could not attach to base station.') from exc
-
-    def calibrated_downlink_rx_power(self, bts_config, rsrp):
-        """Convert RSRP to total signal power from the basestation.
-
-        Args:
-            bts_config: the current configuration at the base station
-            rsrp: desired rsrp, contained in a key value pair
-        """
-        raise NotImplementedError(
-            'This simulation mode does not support this configuration option')
-
-    def downlink_calibration(self, rat=None, power_units_conversion_func=None):
-        """Computes downlink path loss and returns the calibration value.
-
-        See base class implementation for details.
-
-        Args:
-            rat: ignored, replaced by 'lteRsrp'.
-            power_units_conversion_func: ignored, replaced by
-                self.rsrp_to_signal_power.
-
-        Returns:
-            Downlink calibration value and measured DL power. Note that the
-            phone only reports RSRP of the primary chain
-        """
-        raise NotImplementedError(
-            'This simulation mode does not support this configuration option')
-
-    def rsrp_to_signal_power(self, rsrp, bts_config):
-        """Converts rsrp to total band signal power
-
-        RSRP is measured per subcarrier, so total band power needs to be
-        multiplied by the number of subcarriers being used.
-
-        Args:
-            rsrp: desired rsrp in dBm.
-            bts_config: a base station configuration object.
-
-        Returns:
-            Total band signal power in dBm
-        """
-        raise NotImplementedError(
-            'This simulation mode does not support this configuration option')
-
-    def maximum_downlink_throughput(self):
-        """Calculates maximum achievable downlink throughput in.
-
-        The calculation is based on the current simulation state
-        Returns:
-            Maximum throughput in mbps.
-        """
-        raise NotImplementedError(
-            'This simulation mode does not support this configuration option')
-
-    def bts_maximum_downlink_throughtput(self, bts_config):
-        """Calculates maximum achievable downlink throughput for a single
-
-        base station from its configuration object.
-
-        Args:
-            bts_config: a base station configuration object.
-
-        Returns:
-            Maximum throughput in mbps.
-        """
-        raise NotImplementedError(
-            'This simulation mode does not support this configuration option')
-
-    def maximum_uplink_throughput(self):
-        """Calculates maximum achievable uplink throughput.
-
-        Returns:
-            Maximum throughput in mbps.
-        """
-        raise NotImplementedError(
-            'This simulation mode does not support this configuration option')
-
-    def bts_maximum_uplink_throughtput(self, bts_config):
-        """Calculates maximum achievable uplink throughput
-
-        The calculation is for selected basestation
-        from its configuration object.
-        Args:
-            bts_config: an LTE base station configuration object.
-
-        Returns:
-            Maximum throughput in mbps.
-
-        """
-        raise NotImplementedError(
-            'This simulation mode does not support this configuration option')
-
-    def calibrate(self, band):
-        """Calculates UL and DL path loss if it wasn't done before
-
-        Before running the base class implementation, configure the base station
-        to only use one downlink antenna with maximum bandwidth.
-
-        Args:
-            band: the band that is currently being calibrated.
-        """
-        raise NotImplementedError(
-            'This simulation mode does not support this configuration option')
-
-    def start_traffic_for_calibration(self):
-        """If MAC padding is enabled, there is no need to start IP traffic. """
-        raise NotImplementedError(
-            'This simulation mode does not support this configuration option')
-
-    def stop_traffic_for_calibration(self):
-        """If MAC padding is enabled, IP traffic wasn't started. """
-        raise NotImplementedError(
-            'This simulation mode does not support this configuration option')
-
-    def get_measured_ul_power(self, samples=5, wait_after_sample=3):
-        """Calculates UL power.
-
-        The calculation is based on measurements from the callbox
-        and the calibration data.
-        Args:
-            samples: the numble of samples to average
-            wait_after_sample: time in seconds to wait in between samples
-
-        Returns:
-            the ul power at the UE antenna ports in dBs
-        """
-        raise NotImplementedError(
-            'This simulation mode does not support this configuration option')
diff --git a/src/antlion/controllers/cellular_lib/UmtsSimulation.py b/src/antlion/controllers/cellular_lib/UmtsSimulation.py
deleted file mode 100644
index 316186f..0000000
--- a/src/antlion/controllers/cellular_lib/UmtsSimulation.py
+++ /dev/null
@@ -1,275 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import ntpath
-import time
-
-from antlion.controllers.anritsu_lib import md8475_cellular_simulator as anritsusim
-from antlion.controllers.anritsu_lib.md8475a import BtsNumber
-from antlion.controllers.anritsu_lib.md8475a import BtsPacketRate
-from antlion.controllers.cellular_lib.BaseSimulation import BaseSimulation
-from antlion.controllers.cellular_lib import BaseCellularDut
-
-
-class UmtsSimulation(BaseSimulation):
-    """ Single base station simulation. """
-
-    # Simulation config files in the callbox computer.
-    # These should be replaced in the future by setting up
-    # the same configuration manually.
-
-    UMTS_BASIC_SIM_FILE = 'SIM_default_WCDMA.wnssp'
-
-    UMTS_R99_CELL_FILE = 'CELL_WCDMA_R99_config.wnscp'
-
-    UMTS_R7_CELL_FILE = 'CELL_WCDMA_R7_config.wnscp'
-
-    UMTS_R8_CELL_FILE = 'CELL_WCDMA_R8_config.wnscp'
-
-    # Configuration dictionary keys
-    PARAM_RELEASE_VERSION = "r"
-    PARAM_RELEASE_VERSION_99 = "99"
-    PARAM_RELEASE_VERSION_8 = "8"
-    PARAM_RELEASE_VERSION_7 = "7"
-    PARAM_BAND = "band"
-    PARAM_RRC_STATUS_CHANGE_TIMER = "rrcstatuschangetimer"
-
-    # Units in which signal level is defined in DOWNLINK_SIGNAL_LEVEL_DICTIONARY
-    DOWNLINK_SIGNAL_LEVEL_UNITS = "RSCP"
-
-    # RSCP signal levels thresholds (as reported by Android). Units are dBm
-    # Using LTE thresholds + 24 dB to have equivalent SPD
-    # 24 dB comes from 10 * log10(3.84 MHz / 15 KHz)
-
-    DOWNLINK_SIGNAL_LEVEL_DICTIONARY = {
-        'excellent': -51,
-        'high': -76,
-        'medium': -86,
-        'weak': -96
-    }
-
-    # Transmitted output power for the phone
-    # Stronger Tx power means that the signal received by the BTS is weaker
-    # Units are dBm
-
-    UPLINK_SIGNAL_LEVEL_DICTIONARY = {
-        'low': -20,
-        'medium': 8,
-        'high': 15,
-        'max': 23
-    }
-
-    # Converts packet rate to the throughput that can be actually obtained in
-    # Mbits/s
-
-    packet_rate_to_dl_throughput = {
-        BtsPacketRate.WCDMA_DL384K_UL64K: 0.362,
-        BtsPacketRate.WCDMA_DL21_6M_UL5_76M: 18.5,
-        BtsPacketRate.WCDMA_DL43_2M_UL5_76M: 36.9
-    }
-
-    packet_rate_to_ul_throughput = {
-        BtsPacketRate.WCDMA_DL384K_UL64K: 0.0601,
-        BtsPacketRate.WCDMA_DL21_6M_UL5_76M: 5.25,
-        BtsPacketRate.WCDMA_DL43_2M_UL5_76M: 5.25
-    }
-
-    def __init__(self, simulator, log, dut, test_config, calibration_table):
-        """ Initializes the cellular simulator for a UMTS simulation.
-
-        Loads a simple UMTS simulation environment with 1 basestation. It also
-        creates the BTS handle so we can change the parameters as desired.
-
-        Args:
-            simulator: a cellular simulator controller
-            log: a logger handle
-            dut: a device handler implementing BaseCellularDut
-            test_config: test configuration obtained from the config file
-            calibration_table: a dictionary containing path losses for
-                different bands.
-
-        """
-        # The UMTS simulation relies on the cellular simulator to be a MD8475
-        if not isinstance(self.simulator, anritsusim.MD8475CellularSimulator):
-            raise ValueError('The UMTS simulation relies on the simulator to '
-                             'be an Anritsu MD8475 A/B instrument.')
-
-        # The Anritsu controller needs to be unwrapped before calling
-        # super().__init__ because setup_simulator() requires self.anritsu and
-        # will be called during the parent class initialization.
-        self.anritsu = self.simulator.anritsu
-        self.bts1 = self.anritsu.get_BTS(BtsNumber.BTS1)
-
-        super().__init__(simulator, log, dut, test_config, calibration_table)
-
-        self.dut.set_preferred_network_type(
-            BaseCellularDut.PreferredNetworkType.WCDMA_ONLY)
-
-        self.release_version = None
-        self.packet_rate = None
-
-    def setup_simulator(self):
-        """ Do initial configuration in the simulator. """
-
-        # Load callbox config files
-        callbox_config_path = self.CALLBOX_PATH_FORMAT_STR.format(
-            self.anritsu._md8475_version)
-
-        self.anritsu.load_simulation_paramfile(
-            ntpath.join(callbox_config_path, self.UMTS_BASIC_SIM_FILE))
-
-        # Start simulation if it wasn't started
-        self.anritsu.start_simulation()
-
-    def configure(self, parameters):
-        """ Configures simulation using a dictionary of parameters.
-
-        Processes UMTS configuration parameters.
-
-        Args:
-            parameters: a configuration dictionary
-        """
-        super().configure(parameters)
-
-        # Setup band
-        if self.PARAM_BAND not in parameters:
-            raise ValueError(
-                "The configuration dictionary must include a key '{}' with "
-                "the required band number.".format(self.PARAM_BAND))
-
-        self.set_band(self.bts1, parameters[self.PARAM_BAND])
-        self.load_pathloss_if_required()
-
-        # Setup release version
-        if (self.PARAM_RELEASE_VERSION not in parameters
-                or parameters[self.PARAM_RELEASE_VERSION] not in [
-                    self.PARAM_RELEASE_VERSION_7, self.PARAM_RELEASE_VERSION_8,
-                    self.PARAM_RELEASE_VERSION_99
-                ]):
-            raise ValueError(
-                "The configuration dictionary must include a key '{}' with a "
-                "valid release version.".format(self.PARAM_RELEASE_VERSION))
-
-        self.set_release_version(self.bts1,
-                                 parameters[self.PARAM_RELEASE_VERSION])
-
-        # Setup W-CDMA RRC status change and CELL_DCH timer for idle test case
-        if self.PARAM_RRC_STATUS_CHANGE_TIMER not in parameters:
-            self.log.info(
-                "The config dictionary does not include a '{}' key. Disabled "
-                "by default.".format(self.PARAM_RRC_STATUS_CHANGE_TIMER))
-            self.anritsu.set_umts_rrc_status_change(False)
-        else:
-            self.rrc_sc_timer = int(
-                parameters[self.PARAM_RRC_STATUS_CHANGE_TIMER])
-            self.anritsu.set_umts_rrc_status_change(True)
-            self.anritsu.set_umts_dch_stat_timer(self.rrc_sc_timer)
-
-    def set_release_version(self, bts, release_version):
-        """ Sets the release version.
-
-        Loads the cell parameter file matching the requested release version.
-        Does nothing is release version is already the one requested.
-
-        """
-
-        if release_version == self.release_version:
-            self.log.info(
-                "Release version is already {}.".format(release_version))
-            return
-        if release_version == self.PARAM_RELEASE_VERSION_99:
-
-            cell_parameter_file = self.UMTS_R99_CELL_FILE
-            self.packet_rate = BtsPacketRate.WCDMA_DL384K_UL64K
-
-        elif release_version == self.PARAM_RELEASE_VERSION_7:
-
-            cell_parameter_file = self.UMTS_R7_CELL_FILE
-            self.packet_rate = BtsPacketRate.WCDMA_DL21_6M_UL5_76M
-
-        elif release_version == self.PARAM_RELEASE_VERSION_8:
-
-            cell_parameter_file = self.UMTS_R8_CELL_FILE
-            self.packet_rate = BtsPacketRate.WCDMA_DL43_2M_UL5_76M
-
-        else:
-            raise ValueError("Invalid UMTS release version number.")
-
-        self.anritsu.load_cell_paramfile(
-            ntpath.join(self.callbox_config_path, cell_parameter_file))
-
-        self.release_version = release_version
-
-        # Loading a cell parameter file stops the simulation
-        self.start()
-
-        bts.packet_rate = self.packet_rate
-
-    def maximum_downlink_throughput(self):
-        """ Calculates maximum achievable downlink throughput in the current
-            simulation state.
-
-        Returns:
-            Maximum throughput in mbps.
-
-        """
-
-        if self.packet_rate not in self.packet_rate_to_dl_throughput:
-            raise NotImplementedError("Packet rate not contained in the "
-                                      "throughput dictionary.")
-        return self.packet_rate_to_dl_throughput[self.packet_rate]
-
-    def maximum_uplink_throughput(self):
-        """ Calculates maximum achievable uplink throughput in the current
-            simulation state.
-
-        Returns:
-            Maximum throughput in mbps.
-
-        """
-
-        if self.packet_rate not in self.packet_rate_to_ul_throughput:
-            raise NotImplementedError("Packet rate not contained in the "
-                                      "throughput dictionary.")
-        return self.packet_rate_to_ul_throughput[self.packet_rate]
-
-    def set_downlink_rx_power(self, bts, signal_level):
-        """ Starts IP data traffic while setting downlink power.
-
-        This is only necessary for UMTS for unclear reasons. b/139026916 """
-
-        # Starts IP traffic while changing this setting to force the UE to be
-        # in Communication state, as UL power cannot be set in Idle state
-        self.start_traffic_for_calibration()
-
-        # Wait until it goes to communication state
-        self.anritsu.wait_for_communication_state()
-
-        super().set_downlink_rx_power(bts, signal_level)
-
-        # Stop IP traffic after setting the signal level
-        self.stop_traffic_for_calibration()
-
-    def set_band(self, bts, band):
-        """ Sets the band used for communication.
-
-        Args:
-            bts: basestation handle
-            band: desired band
-        """
-
-        bts.band = band
-        time.sleep(5)  # It takes some time to propagate the new band
diff --git a/src/antlion/controllers/cellular_lib/__init__.py b/src/antlion/controllers/cellular_lib/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/controllers/cellular_lib/__init__.py
+++ /dev/null
diff --git a/src/antlion/controllers/cellular_simulator.py b/src/antlion/controllers/cellular_simulator.py
deleted file mode 100644
index 30eeba3..0000000
--- a/src/antlion/controllers/cellular_simulator.py
+++ /dev/null
@@ -1,489 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-from antlion import logger
-from antlion.controllers import cellular_lib
-
-
-class AbstractCellularSimulator:
-    """ A generic cellular simulator controller class that can be derived to
-    implement equipment specific classes and allows the tests to be implemented
-    without depending on a singular instrument model.
-
-    This class defines the interface that every cellular simulator controller
-    needs to implement and shouldn't be instantiated by itself. """
-
-    # The maximum number of carriers that this simulator can support for LTE
-    LTE_MAX_CARRIERS = None
-
-    # The maximum power that the equipment is able to transmit
-    MAX_DL_POWER = None
-
-    def __init__(self):
-        """ Initializes the cellular simulator. """
-        self.log = logger.create_tagged_trace_logger('CellularSimulator')
-        self.num_carriers = None
-
-    def destroy(self):
-        """ Sends finalization commands to the cellular equipment and closes
-        the connection. """
-        raise NotImplementedError()
-
-    def setup_lte_scenario(self):
-        """ Configures the equipment for an LTE simulation. """
-        raise NotImplementedError()
-
-    def set_band_combination(self, bands):
-        """ Prepares the test equipment for the indicated CA combination.
-
-        Args:
-            bands: a list of bands represented as ints or strings
-        """
-        raise NotImplementedError()
-
-    def configure_bts(self, config, bts_index=0):
-        """ Commands the equipment to setup a base station with the required
-        configuration. This method applies configurations that are common to all
-        RATs.
-
-        Args:
-            config: a BaseSimulation.BtsConfig object.
-            bts_index: the base station number.
-        """
-        self.log.info('The config for {} is {}'.format(bts_index, str(config)))
-
-        if config.output_power:
-            self.set_output_power(bts_index, config.output_power)
-
-        if config.input_power:
-            self.set_input_power(bts_index, config.input_power)
-
-        if isinstance(config, cellular_lib.LteCellConfig.LteCellConfig):
-            self.configure_lte_bts(config, bts_index)
-
-        if isinstance(config, cellular_lib.NrCellConfig.NrCellConfig):
-            self.configure_nr_bts(config, bts_index)
-
-    def configure_lte_bts(self, config, bts_index=0):
-        """ Commands the equipment to setup an LTE base station with the
-        required configuration.
-
-        Args:
-            config: an LteSimulation.BtsConfig object.
-            bts_index: the base station number.
-        """
-        if config.band:
-            self.set_band(bts_index, config.band)
-
-        if config.dlul_config:
-            self.set_tdd_config(bts_index, config.dlul_config)
-
-        if config.ssf_config:
-            self.set_ssf_config(bts_index, config.ssf_config)
-
-        if config.bandwidth:
-            self.set_bandwidth(bts_index, config.bandwidth)
-
-        if config.dl_channel:
-            self.set_downlink_channel_number(bts_index, config.dl_channel)
-
-        if config.mimo_mode:
-            self.set_mimo_mode(bts_index, config.mimo_mode)
-
-        if config.transmission_mode:
-            self.set_transmission_mode(bts_index, config.transmission_mode)
-
-        # Modulation order should be set before set_scheduling_mode being
-        # called.
-        if config.dl_256_qam_enabled is not None:
-            self.set_dl_256_qam_enabled(bts_index, config.dl_256_qam_enabled)
-
-        if config.ul_64_qam_enabled is not None:
-            self.set_ul_64_qam_enabled(bts_index, config.ul_64_qam_enabled)
-
-        if config.scheduling_mode:
-
-            if (config.scheduling_mode ==
-                    cellular_lib.LteSimulation.SchedulingMode.STATIC
-                    and not (config.dl_rbs and config.ul_rbs and config.dl_mcs
-                             and config.ul_mcs)):
-                raise ValueError('When the scheduling mode is set to manual, '
-                                 'the RB and MCS parameters are required.')
-
-            # If scheduling mode is set to Dynamic, the RB and MCS parameters
-            # will be ignored by set_scheduling_mode.
-            self.set_scheduling_mode(bts_index, config.scheduling_mode,
-                                     config.dl_mcs, config.ul_mcs,
-                                     config.dl_rbs, config.ul_rbs)
-
-        # This variable stores a boolean value so the following is needed to
-        # differentiate False from None
-        if config.mac_padding is not None:
-            self.set_mac_padding(bts_index, config.mac_padding)
-
-        if config.cfi:
-            self.set_cfi(bts_index, config.cfi)
-
-        if config.paging_cycle:
-            self.set_paging_cycle(bts_index, config.paging_cycle)
-
-        if config.phich:
-            self.set_phich_resource(bts_index, config.phich)
-
-        if config.drx_connected_mode:
-            self.set_drx_connected_mode(bts_index, config.drx_connected_mode)
-
-            if config.drx_on_duration_timer:
-                self.set_drx_on_duration_timer(bts_index,
-                                               config.drx_on_duration_timer)
-
-            if config.drx_inactivity_timer:
-                self.set_drx_inactivity_timer(bts_index,
-                                              config.drx_inactivity_timer)
-
-            if config.drx_retransmission_timer:
-                self.set_drx_retransmission_timer(
-                    bts_index, config.drx_retransmission_timer)
-
-            if config.drx_long_cycle:
-                self.set_drx_long_cycle(bts_index, config.drx_long_cycle)
-
-            if config.drx_long_cycle_offset is not None:
-                self.set_drx_long_cycle_offset(bts_index,
-                                               config.drx_long_cycle_offset)
-
-    def configure_nr_bts(self, config, bts_index=1):
-        """ Commands the equipment to setup an LTE base station with the
-        required configuration.
-
-        Args:
-            config: an LteSimulation.BtsConfig object.
-            bts_index: the base station number.
-        """
-        if config.band:
-            self.set_band(bts_index, config.band)
-
-        if config.nr_arfcn:
-            self.set_downlink_channel_number(bts_index, config.nr_arfcn)
-
-        if config.bandwidth:
-            self.set_bandwidth(bts_index, config.bandwidth)
-
-        if config.mimo_mode:
-            self.set_mimo_mode(bts_index, config.mimo_mode)
-
-        if config.scheduling_mode:
-
-            if (config.scheduling_mode ==
-                    cellular_lib.LteSimulation.SchedulingMode.STATIC
-                    and not (config.dl_rbs and config.ul_rbs and config.dl_mcs
-                             and config.ul_mcs)):
-                raise ValueError('When the scheduling mode is set to manual, '
-                                 'the RB and MCS parameters are required.')
-
-            # If scheduling mode is set to Dynamic, the RB and MCS parameters
-            # will be ignored by set_scheduling_mode.
-            self.set_scheduling_mode(bts_index, config.scheduling_mode,
-                                     config.dl_mcs, config.ul_mcs,
-                                     config.dl_rbs, config.ul_rbs)
-        if config.mac_padding is not None:
-            self.set_mac_padding(bts_index, config.mac_padding)
-
-    def set_lte_rrc_state_change_timer(self, enabled, time=10):
-        """ Configures the LTE RRC state change timer.
-
-        Args:
-            enabled: a boolean indicating if the timer should be on or off.
-            time: time in seconds for the timer to expire
-        """
-        raise NotImplementedError()
-
-    def set_band(self, bts_index, band):
-        """ Sets the band for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            band: the new band
-        """
-        raise NotImplementedError()
-
-    def set_input_power(self, bts_index, input_power):
-        """ Sets the input power for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            input_power: the new input power
-        """
-        raise NotImplementedError()
-
-    def set_output_power(self, bts_index, output_power):
-        """ Sets the output power for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            output_power: the new output power
-        """
-        raise NotImplementedError()
-
-    def set_tdd_config(self, bts_index, tdd_config):
-        """ Sets the tdd configuration number for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            tdd_config: the new tdd configuration number
-        """
-        raise NotImplementedError()
-
-    def set_ssf_config(self, bts_index, ssf_config):
-        """ Sets the Special Sub-Frame config number for the indicated
-        base station.
-
-        Args:
-            bts_index: the base station number
-            ssf_config: the new ssf config number
-        """
-        raise NotImplementedError()
-
-    def set_bandwidth(self, bts_index, bandwidth):
-        """ Sets the bandwidth for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            bandwidth: the new bandwidth
-        """
-        raise NotImplementedError()
-
-    def set_downlink_channel_number(self, bts_index, channel_number):
-        """ Sets the downlink channel number for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            channel_number: the new channel number
-        """
-        raise NotImplementedError()
-
-    def set_mimo_mode(self, bts_index, mimo_mode):
-        """ Sets the mimo mode for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            mimo_mode: the new mimo mode
-        """
-        raise NotImplementedError()
-
-    def set_transmission_mode(self, bts_index, transmission_mode):
-        """ Sets the transmission mode for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            transmission_mode: the new transmission mode
-        """
-        raise NotImplementedError()
-
-    def set_scheduling_mode(self, bts_index, scheduling_mode, mcs_dl, mcs_ul,
-                            nrb_dl, nrb_ul):
-        """ Sets the scheduling mode for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            scheduling_mode: the new scheduling mode
-            mcs_dl: Downlink MCS (only for STATIC scheduling)
-            mcs_ul: Uplink MCS (only for STATIC scheduling)
-            nrb_dl: Number of RBs for downlink (only for STATIC scheduling)
-            nrb_ul: Number of RBs for uplink (only for STATIC scheduling)
-        """
-        raise NotImplementedError()
-
-    def set_dl_256_qam_enabled(self, bts_index, enabled):
-        """ Determines what MCS table should be used for the downlink.
-
-        Args:
-            bts_index: the base station number
-            enabled: whether 256 QAM should be used
-        """
-        raise NotImplementedError()
-
-    def set_ul_64_qam_enabled(self, bts_index, enabled):
-        """ Determines what MCS table should be used for the uplink.
-
-        Args:
-            bts_index: the base station number
-            enabled: whether 64 QAM should be used
-        """
-        raise NotImplementedError()
-
-    def set_mac_padding(self, bts_index, mac_padding):
-        """ Enables or disables MAC padding in the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            mac_padding: the new MAC padding setting
-        """
-        raise NotImplementedError()
-
-    def set_cfi(self, bts_index, cfi):
-        """ Sets the Channel Format Indicator for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            cfi: the new CFI setting
-        """
-        raise NotImplementedError()
-
-    def set_paging_cycle(self, bts_index, cycle_duration):
-        """ Sets the paging cycle duration for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            cycle_duration: the new paging cycle duration in milliseconds
-        """
-        raise NotImplementedError()
-
-    def set_phich_resource(self, bts_index, phich):
-        """ Sets the PHICH Resource setting for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            phich: the new PHICH resource setting
-        """
-        raise NotImplementedError()
-
-    def set_drx_connected_mode(self, bts_index, active):
-        """ Sets the time interval to wait before entering DRX mode
-
-        Args:
-            bts_index: the base station number
-            active: Boolean indicating whether cDRX mode
-                is active
-        """
-        raise NotImplementedError()
-
-    def set_drx_on_duration_timer(self, bts_index, timer):
-        """ Sets the amount of PDCCH subframes to wait for data after
-            waking up from a DRX cycle
-
-        Args:
-            bts_index: the base station number
-            timer: Number of PDCCH subframes to wait and check for user data
-                after waking from the DRX cycle
-        """
-        raise NotImplementedError()
-
-    def set_drx_inactivity_timer(self, bts_index, timer):
-        """ Sets the number of PDCCH subframes to wait before entering DRX mode
-
-        Args:
-            bts_index: the base station number
-            timer: The amount of time to wait before entering DRX mode
-        """
-        raise NotImplementedError()
-
-    def set_drx_retransmission_timer(self, bts_index, timer):
-        """ Sets the number of consecutive PDCCH subframes to wait
-        for retransmission
-
-        Args:
-            bts_index: the base station number
-            timer: Number of PDCCH subframes to remain active
-
-        """
-        raise NotImplementedError()
-
-    def set_drx_long_cycle(self, bts_index, cycle):
-        """ Sets the amount of subframes representing a DRX long cycle.
-
-        Args:
-            bts_index: the base station number
-            cycle: The amount of subframes representing one long DRX cycle.
-                One cycle consists of DRX sleep + DRX on duration
-        """
-        raise NotImplementedError()
-
-    def set_drx_long_cycle_offset(self, bts_index, offset):
-        """ Sets the offset used to determine the subframe number
-        to begin the long drx cycle
-
-        Args:
-            bts_index: the base station number
-            offset: Number in range 0 to (long cycle - 1)
-        """
-        raise NotImplementedError()
-
-    def lte_attach_secondary_carriers(self, ue_capability_enquiry):
-        """ Activates the secondary carriers for CA. Requires the DUT to be
-        attached to the primary carrier first.
-
-        Args:
-            ue_capability_enquiry: UE capability enquiry message to be sent to
-        the UE before starting carrier aggregation.
-        """
-        raise NotImplementedError()
-
-    def wait_until_attached(self, timeout=120):
-        """ Waits until the DUT is attached to the primary carrier.
-
-        Args:
-            timeout: after this amount of time the method will raise a
-                CellularSimulatorError exception. Default is 120 seconds.
-        """
-        raise NotImplementedError()
-
-    def wait_until_communication_state(self, timeout=120):
-        """ Waits until the DUT is in Communication state.
-
-        Args:
-            timeout: after this amount of time the method will raise a
-                CellularSimulatorError exception. Default is 120 seconds.
-        """
-        raise NotImplementedError()
-
-    def wait_until_idle_state(self, timeout=120):
-        """ Waits until the DUT is in Idle state.
-
-        Args:
-            timeout: after this amount of time the method will raise a
-                CellularSimulatorError exception. Default is 120 seconds.
-        """
-        raise NotImplementedError()
-
-    def detach(self):
-        """ Turns off all the base stations so the DUT loose connection."""
-        raise NotImplementedError()
-
-    def stop(self):
-        """ Stops current simulation. After calling this method, the simulator
-        will need to be set up again. """
-        raise NotImplementedError()
-
-    def start_data_traffic(self):
-        """ Starts transmitting data from the instrument to the DUT. """
-        raise NotImplementedError()
-
-    def stop_data_traffic(self):
-        """ Stops transmitting data from the instrument to the DUT. """
-        raise NotImplementedError()
-
-    def get_measured_pusch_power(self):
-        """ Queries PUSCH power measured at the callbox.
-
-        Returns:
-            The PUSCH power in the primary input port.
-        """
-        raise NotImplementedError()
-
-
-class CellularSimulatorError(Exception):
-    """ Exceptions thrown when the cellular equipment is unreachable or it
-    returns an error after receiving a command. """
diff --git a/src/antlion/controllers/chameleon_controller.py b/src/antlion/controllers/chameleon_controller.py
deleted file mode 100644
index 7f8ce1a..0000000
--- a/src/antlion/controllers/chameleon_controller.py
+++ /dev/null
@@ -1,188 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-import xmlrpc.client
-from subprocess import call
-
-from antlion import signals
-
-MOBLY_CONTROLLER_CONFIG_NAME = "ChameleonDevice"
-ACTS_CONTROLLER_REFERENCE_NAME = "chameleon_devices"
-
-CHAMELEON_DEVICE_EMPTY_CONFIG_MSG = "Configuration is empty, abort!"
-CHAMELEON_DEVICE_NOT_LIST_CONFIG_MSG = "Configuration should be a list, abort!"
-
-audio_bus_endpoints = {
-    'CROS_HEADPHONE': 'Cros device headphone',
-    'CROS_EXTERNAL_MICROPHONE': 'Cros device external microphone',
-    'PERIPHERAL_MICROPHONE': 'Peripheral microphone',
-    'PERIPHERAL_SPEAKER': 'Peripheral speaker',
-    'FPGA_LINEOUT': 'Chameleon FPGA line-out',
-    'FPGA_LINEIN': 'Chameleon FPGA line-in',
-    'BLUETOOTH_OUTPUT': 'Bluetooth module output',
-    'BLUETOOTH_INPUT': 'Bluetooth module input'
-}
-
-
-class ChameleonDeviceError(signals.ControllerError):
-    pass
-
-
-def create(configs):
-    if not configs:
-        raise ChameleonDeviceError(CHAMELEON_DEVICE_EMPTY_CONFIG_MSG)
-    elif not isinstance(configs, list):
-        raise ChameleonDeviceError(CHAMELEON_DEVICE_NOT_LIST_CONFIG_MSG)
-    elif isinstance(configs[0], str):
-        # Configs is a list of IP addresses
-        chameleons = get_instances(configs)
-    return chameleons
-
-
-def destroy(chameleons):
-    for chameleon in chameleons:
-        del chameleon
-
-
-def get_info(chameleons):
-    """Get information on a list of ChameleonDevice objects.
-
-    Args:
-        ads: A list of ChameleonDevice objects.
-
-    Returns:
-        A list of dict, each representing info for ChameleonDevice objects.
-    """
-    device_info = []
-    for chameleon in chameleons:
-        info = {"address": chameleon.address, "port": chameleon.port}
-        device_info.append(info)
-    return device_info
-
-
-def get_instances(ips):
-    """Create ChameleonDevice instances from a list of IPs.
-
-    Args:
-        ips: A list of Chameleon IPs.
-
-    Returns:
-        A list of ChameleonDevice objects.
-    """
-    return [ChameleonDevice(ip) for ip in ips]
-
-
-class ChameleonDevice:
-    """Class representing a Chameleon device.
-
-    Each object of this class represents one Chameleon device in ACTS.
-
-    Attributes:
-        address: The full address to contact the Chameleon device at
-        client: The ServiceProxy of the XMLRPC client.
-        log: A logger object.
-        port: The TCP port number of the Chameleon device.
-    """
-
-    def __init__(self, ip="", port=9992):
-        self.ip = ip
-        self.log = logging.getLogger()
-        self.port = port
-        self.address = "http://{}:{}".format(ip, self.port)
-        try:
-            self.client = xmlrpc.client.ServerProxy(self.address,
-                                                    allow_none=True,
-                                                    verbose=False)
-        except ConnectionRefusedError as err:
-            self.log.exception(
-                "Failed to connect to Chameleon Device at: {}".format(
-                    self.address))
-        self.client.Reset()
-
-    def pull_file(self, chameleon_location, destination):
-        """Pulls a file from the Chameleon device. Usually the raw audio file.
-
-        Args:
-            chameleon_location: The path to the file on the Chameleon device
-            destination: The destination to where to pull it locally.
-        """
-        # TODO: (tturney) implement
-        self.log.error("Definition not yet implemented")
-
-    def start_capturing_audio(self, port_id, has_file=True):
-        """Starts capturing audio.
-
-        Args:
-            port_id: The ID of the audio input port.
-            has_file: True for saving audio data to file. False otherwise.
-        """
-        self.client.StartCapturingAudio(port_id, has_file)
-
-    def stop_capturing_audio(self, port_id):
-        """Stops capturing audio.
-
-        Args:
-            port_id: The ID of the audio input port.
-        Returns:
-            List contain the location of the recorded audio and a dictionary
-            of values relating to the raw audio including: file_type, channel,
-            sample_format, and rate.
-        """
-        return self.client.StopCapturingAudio(port_id)
-
-    def audio_board_connect(self, bus_number, endpoint):
-        """Connects an endpoint to an audio bus.
-
-        Args:
-            bus_number: 1 or 2 for audio bus 1 or bus 2.
-            endpoint: An endpoint defined in audio_bus_endpoints.
-        """
-        self.client.AudioBoardConnect(bus_number, endpoint)
-
-    def audio_board_disconnect(self, bus_number, endpoint):
-        """Connects an endpoint to an audio bus.
-
-        Args:
-            bus_number: 1 or 2 for audio bus 1 or bus 2.
-            endpoint: An endpoint defined in audio_bus_endpoints.
-        """
-        self.client.AudioBoardDisconnect(bus_number, endpoint)
-
-    def audio_board_disable_bluetooth(self):
-        """Disables Bluetooth module on audio board."""
-        self.client.AudioBoardDisableBluetooth()
-
-    def audio_board_clear_routes(self, bus_number):
-        """Clears routes on an audio bus.
-
-        Args:
-            bus_number: 1 or 2 for audio bus 1 or bus 2.
-        """
-        self.client.AudioBoardClearRoutes(bus_number)
-
-    def scp(self, source, destination):
-        """Copies files from the Chameleon device to the host machine.
-
-        Args:
-            source: The file path on the Chameleon board.
-            dest: The file path on the host machine.
-        """
-        cmd = "scp root@{}:/{} {}".format(self.ip, source, destination)
-        try:
-            call(cmd.split(" "))
-        except FileNotFoundError as err:
-            self.log.exception("File not found {}".format(source))
diff --git a/src/antlion/controllers/fastboot.py b/src/antlion/controllers/fastboot.py
index 0b889fa..ed67245 100755
--- a/src/antlion/controllers/fastboot.py
+++ b/src/antlion/controllers/fastboot.py
@@ -30,12 +30,12 @@
         self.ret_code = ret_code
 
     def __str__(self):
-        return ("Error executing fastboot cmd '%s'. ret: %d, stdout: %s,"
-                " stderr: %s") % (self.cmd, self.ret_code, self.stdout,
-                                  self.stderr)
+        return (
+            "Error executing fastboot cmd '%s'. ret: %d, stdout: %s," " stderr: %s"
+        ) % (self.cmd, self.ret_code, self.stdout, self.stderr)
 
 
-class FastbootProxy():
+class FastbootProxy:
     """Proxy class for fastboot.
 
     For syntactic reasons, the '-' in fastboot commands need to be replaced
@@ -52,16 +52,10 @@
             self.fastboot_str = "fastboot"
         self.ssh_connection = ssh_connection
 
-    def _exec_fastboot_cmd(self,
-                           name,
-                           arg_str,
-                           ignore_status=False,
-                           timeout=60):
-        command = ' '.join((self.fastboot_str, name, arg_str))
+    def _exec_fastboot_cmd(self, name, arg_str, ignore_status=False, timeout=60):
+        command = " ".join((self.fastboot_str, name, arg_str))
         if self.ssh_connection:
-            result = self.connection.run(command,
-                                         ignore_status=True,
-                                         timeout=timeout)
+            result = self.connection.run(command, ignore_status=True, timeout=timeout)
         else:
             result = job.run(command, ignore_status=True, timeout=timeout)
         ret, out, err = result.exit_status, result.stdout, result.stderr
@@ -72,18 +66,15 @@
         if ret == 0 or ignore_status:
             return out
         else:
-            raise FastbootError(cmd=command,
-                                stdout=out,
-                                stderr=err,
-                                ret_code=ret)
+            raise FastbootError(cmd=command, stdout=out, stderr=err, ret_code=ret)
 
     def args(self, *args, **kwargs):
-        return job.run(' '.join((self.fastboot_str, ) + args), **kwargs).stdout
+        return job.run(" ".join((self.fastboot_str,) + args), **kwargs).stdout
 
     def __getattr__(self, name):
         def fastboot_call(*args, **kwargs):
-            clean_name = name.replace('_', '-')
-            arg_str = ' '.join(str(elem) for elem in args)
+            clean_name = name.replace("_", "-")
+            arg_str = " ".join(str(elem) for elem in args)
             return self._exec_fastboot_cmd(clean_name, arg_str, **kwargs)
 
         return fastboot_call
diff --git a/src/antlion/controllers/fuchsia_device.py b/src/antlion/controllers/fuchsia_device.py
index c0d62c7..414afb4 100644
--- a/src/antlion/controllers/fuchsia_device.py
+++ b/src/antlion/controllers/fuchsia_device.py
@@ -27,16 +27,26 @@
 from antlion import logger as acts_logger
 from antlion import signals
 from antlion import utils
+from antlion.capabilities.ssh import DEFAULT_SSH_PORT, SSHConfig, SSHError
 from antlion.controllers import pdu
 from antlion.controllers.fuchsia_lib.ffx import FFX
-from antlion.controllers.fuchsia_lib.lib_controllers.netstack_controller import NetstackController
-from antlion.controllers.fuchsia_lib.lib_controllers.wlan_controller import WlanController
-from antlion.controllers.fuchsia_lib.lib_controllers.wlan_policy_controller import WlanPolicyController
+from antlion.controllers.fuchsia_lib.lib_controllers.netstack_controller import (
+    NetstackController,
+)
+from antlion.controllers.fuchsia_lib.lib_controllers.wlan_controller import (
+    WlanController,
+)
+from antlion.controllers.fuchsia_lib.lib_controllers.wlan_policy_controller import (
+    WlanPolicyController,
+)
 from antlion.controllers.fuchsia_lib.package_server import PackageServer
 from antlion.controllers.fuchsia_lib.sl4f import SL4F
-from antlion.controllers.fuchsia_lib.ssh import DEFAULT_SSH_PORT, DEFAULT_SSH_PRIVATE_KEY, DEFAULT_SSH_USER, SSHConfig, SSHProvider, FuchsiaSSHError
+from antlion.controllers.fuchsia_lib.ssh import (
+    DEFAULT_SSH_PRIVATE_KEY,
+    DEFAULT_SSH_USER,
+    FuchsiaSSHProvider,
+)
 from antlion.controllers.fuchsia_lib.utils_lib import flash
-from antlion.libs.proc import job
 from antlion.utils import get_fuchsia_mdns_ipv6_address, get_interface_ip_addresses
 
 MOBLY_CONTROLLER_CONFIG_NAME = "FuchsiaDevice"
@@ -44,8 +54,10 @@
 
 FUCHSIA_DEVICE_EMPTY_CONFIG_MSG = "Configuration is empty, abort!"
 FUCHSIA_DEVICE_NOT_LIST_CONFIG_MSG = "Configuration should be a list, abort!"
-FUCHSIA_DEVICE_INVALID_CONFIG = ("Fuchsia device config must be either a str "
-                                 "or dict. abort! Invalid element %i in %r")
+FUCHSIA_DEVICE_INVALID_CONFIG = (
+    "Fuchsia device config must be either a str "
+    "or dict. abort! Invalid element %i in %r"
+)
 FUCHSIA_DEVICE_NO_IP_MSG = "No IP address specified, abort!"
 FUCHSIA_COULD_NOT_GET_DESIRED_STATE = "Could not %s %s."
 FUCHSIA_INVALID_CONTROL_STATE = "Invalid control state (%s). abort!"
@@ -62,9 +74,9 @@
 
 CHANNEL_OPEN_TIMEOUT = 5
 
-FUCHSIA_REBOOT_TYPE_SOFT = 'soft'
-FUCHSIA_REBOOT_TYPE_SOFT_AND_FLASH = 'flash'
-FUCHSIA_REBOOT_TYPE_HARD = 'hard'
+FUCHSIA_REBOOT_TYPE_SOFT = "soft"
+FUCHSIA_REBOOT_TYPE_SOFT_AND_FLASH = "flash"
+FUCHSIA_REBOOT_TYPE_HARD = "hard"
 
 FUCHSIA_DEFAULT_CONNECT_TIMEOUT = 90
 FUCHSIA_DEFAULT_COMMAND_TIMEOUT = 60
@@ -72,11 +84,11 @@
 FUCHSIA_DEFAULT_CLEAN_UP_COMMAND_TIMEOUT = 15
 
 FUCHSIA_COUNTRY_CODE_TIMEOUT = 15
-FUCHSIA_DEFAULT_COUNTRY_CODE_US = 'US'
+FUCHSIA_DEFAULT_COUNTRY_CODE_US = "US"
 
 MDNS_LOOKUP_RETRY_MAX = 3
 
-VALID_ASSOCIATION_MECHANISMS = {None, 'policy', 'drivers'}
+VALID_ASSOCIATION_MECHANISMS = {None, "policy", "drivers"}
 IP_ADDRESS_TIMEOUT = 15
 
 
@@ -97,8 +109,7 @@
         if isinstance(config, str):
             configs[index] = {"ip": config}
         elif not isinstance(config, dict):
-            raise FuchsiaDeviceError(FUCHSIA_DEVICE_INVALID_CONFIG %
-                                     (index, configs))
+            raise FuchsiaDeviceError(FUCHSIA_DEVICE_INVALID_CONFIG % (index, configs))
     return get_instances(configs)
 
 
@@ -158,15 +169,14 @@
         self.ip: str = fd_conf_data["ip"]
         self.orig_ip: str = fd_conf_data["ip"]
         self.sl4f_port: int = fd_conf_data.get("sl4f_port", 80)
-        self.ssh_username: str = fd_conf_data.get("ssh_username",
-                                                  DEFAULT_SSH_USER)
+        self.ssh_username: str = fd_conf_data.get("ssh_username", DEFAULT_SSH_USER)
         self.ssh_port: int = fd_conf_data.get("ssh_port", DEFAULT_SSH_PORT)
+        self.ssh_binary_path: str = fd_conf_data.get("ssh_binary_path", "ssh")
 
         def expand(path: str) -> str:
             return os.path.expandvars(os.path.expanduser(path))
 
-        def path_from_config(name: str,
-                             default: Optional[str] = None) -> Optional[str]:
+        def path_from_config(name: str, default: Optional[str] = None) -> Optional[str]:
             path = fd_conf_data.get(name, default)
             if not path:
                 return path
@@ -175,11 +185,13 @@
         def assert_exists(name: str, path: str) -> None:
             if not path:
                 raise FuchsiaDeviceError(
-                    f'Please specify "${name}" in your configuration file')
+                    f'Please specify "${name}" in your configuration file'
+                )
             if not os.path.exists(path):
                 raise FuchsiaDeviceError(
                     f'Please specify a correct "${name}" in your configuration '
-                    f'file: "{path}" does not exist')
+                    f'file: "{path}" does not exist'
+                )
 
         self.specific_image: Optional[str] = path_from_config("specific_image")
         if self.specific_image:
@@ -188,71 +200,76 @@
         # Path to a tar.gz archive with pm and amber-files, as necessary for
         # starting a package server.
         self.packages_archive_path: Optional[str] = path_from_config(
-            "packages_archive_path", None)
+            "packages_archive_path", None
+        )
         if self.packages_archive_path:
             assert_exists("packages_archive_path", self.packages_archive_path)
 
-        def required_path_from_config(name: str,
-                                      default: Optional[str] = None) -> str:
+        def required_path_from_config(name: str, default: Optional[str] = None) -> str:
             path = path_from_config(name, default)
             assert_exists(name, path)
             return path
 
         self.ssh_priv_key: str = required_path_from_config(
-            "ssh_priv_key", DEFAULT_SSH_PRIVATE_KEY)
-        self.authorized_file: str = required_path_from_config(
-            "authorized_file_loc", f'{self.ssh_priv_key}.pub')
+            "ssh_priv_key", DEFAULT_SSH_PRIVATE_KEY
+        )
         self.ffx_binary_path: str = required_path_from_config(
-            "ffx_binary_path", "${FUCHSIA_DIR}/.jiri_root/bin/ffx")
+            "ffx_binary_path", "${FUCHSIA_DIR}/.jiri_root/bin/ffx"
+        )
 
-        self.serial_number: Optional[str] = fd_conf_data.get(
-            "serial_number", None)
+        self.authorized_file: Optional[str] = fd_conf_data.get(
+            "authorized_file_loc", None
+        )
+        self.serial_number: Optional[str] = fd_conf_data.get("serial_number", None)
         self.device_type: Optional[str] = fd_conf_data.get("device_type", None)
-        self.product_type: Optional[str] = fd_conf_data.get(
-            "product_type", None)
+        self.product_type: Optional[str] = fd_conf_data.get("product_type", None)
         self.board_type: Optional[str] = fd_conf_data.get("board_type", None)
-        self.build_number: Optional[str] = fd_conf_data.get(
-            "build_number", None)
+        self.build_number: Optional[str] = fd_conf_data.get("build_number", None)
         self.build_type: Optional[str] = fd_conf_data.get("build_type", None)
 
         self.mdns_name: Optional[str] = fd_conf_data.get("mdns_name", None)
 
-        self.hard_reboot_on_fail: bool = fd_conf_data.get(
-            "hard_reboot_on_fail", False)
+        self.hard_reboot_on_fail: bool = fd_conf_data.get("hard_reboot_on_fail", False)
         self.take_bug_report_on_fail: bool = fd_conf_data.get(
-            "take_bug_report_on_fail", False)
+            "take_bug_report_on_fail", False
+        )
         self.device_pdu_config = fd_conf_data.get("PduDevice", None)
         self.config_country_code: str = fd_conf_data.get(
-            'country_code', FUCHSIA_DEFAULT_COUNTRY_CODE_US).upper()
+            "country_code", FUCHSIA_DEFAULT_COUNTRY_CODE_US
+        ).upper()
 
         output_path = context.get_current_context().get_base_output_path()
-        self.ssh_config = os.path.join(output_path,
-                                       "ssh_config_{}".format(self.ip))
+        self.ssh_config = os.path.join(output_path, "ssh_config_{}".format(self.ip))
         self._generate_ssh_config(self.ssh_config)
 
         # WLAN interface info is populated inside configure_wlan
         self.wlan_client_interfaces = {}
         self.wlan_ap_interfaces = {}
         self.wlan_client_test_interface_name = fd_conf_data.get(
-            'wlan_client_test_interface', None)
+            "wlan_client_test_interface", None
+        )
         self.wlan_ap_test_interface_name = fd_conf_data.get(
-            'wlan_ap_test_interface', None)
-        self.wlan_features: List[str] = fd_conf_data.get('wlan_features', [])
+            "wlan_ap_test_interface", None
+        )
+        self.wlan_features: List[str] = fd_conf_data.get("wlan_features", [])
 
         # Whether to use 'policy' or 'drivers' for WLAN connect/disconnect calls
         # If set to None, wlan is not configured.
         self.association_mechanism = None
         # Defaults to policy layer, unless otherwise specified in the config
         self.default_association_mechanism = fd_conf_data.get(
-            'association_mechanism', 'policy')
+            "association_mechanism", "policy"
+        )
 
         # Whether to clear and preserve existing saved networks and client
         # connections state, to be restored at device teardown.
         self.default_preserve_saved_networks = fd_conf_data.get(
-            'preserve_saved_networks', True)
+            "preserve_saved_networks", True
+        )
 
-        if not utils.is_valid_ipv4_address(
-                self.ip) and not utils.is_valid_ipv6_address(self.ip):
+        if not utils.is_valid_ipv4_address(self.ip) and not utils.is_valid_ipv6_address(
+            self.ip
+        ):
             mdns_ip = None
             for retry_counter in range(MDNS_LOOKUP_RETRY_MAX):
                 mdns_ip = get_fuchsia_mdns_ipv6_address(self.ip)
@@ -266,19 +283,21 @@
                 self.mdns_name = self.mdns_name or self.ip
                 self.ip = mdns_ip
             else:
-                raise ValueError('Invalid IP: %s' % self.ip)
+                raise ValueError("Invalid IP: %s" % self.ip)
 
         self.log = acts_logger.create_tagged_trace_logger(
-            "FuchsiaDevice | %s" % self.orig_ip)
+            "FuchsiaDevice | %s" % self.orig_ip
+        )
 
-        self.ping_rtt_match = re.compile(r'RTT Min/Max/Avg '
-                                         r'= \[ (.*?) / (.*?) / (.*?) \] ms')
-        self.serial = re.sub('[.:%]', '_', self.ip)
-        log_path_base = getattr(logging, 'log_path', '/tmp/logs')
-        self.log_path = os.path.join(log_path_base,
-                                     'FuchsiaDevice%s' % self.serial)
+        self.ping_rtt_match = re.compile(
+            r"RTT Min/Max/Avg " r"= \[ (.*?) / (.*?) / (.*?) \] ms"
+        )
+        self.serial = re.sub("[.:%]", "_", self.ip)
+        log_path_base = getattr(logging, "log_path", "/tmp/logs")
+        self.log_path = os.path.join(log_path_base, "FuchsiaDevice%s" % self.serial)
         self.fuchsia_log_file_path = os.path.join(
-            self.log_path, "fuchsialog_%s_debug.txt" % self.serial)
+            self.log_path, "fuchsialog_%s_debug.txt" % self.serial
+        )
         self.log_process = None
         self.package_server = None
 
@@ -291,38 +310,46 @@
         The sl4f module uses lazy-initialization; it will initialize an sl4f
         server on the host device when it is required.
         """
-        if not hasattr(self, '_sl4f'):
+        if not hasattr(self, "_sl4f"):
             self._sl4f = SL4F(self.ssh, self.sl4f_port)
-            self.log.info('Started SL4F server')
+            self.log.info("Started SL4F server")
         return self._sl4f
 
     @sl4f.deleter
     def sl4f(self):
-        if not hasattr(self, '_sl4f'):
+        if not hasattr(self, "_sl4f"):
             return
-        self.log.debug('Cleaning up SL4F')
+        self.log.debug("Cleaning up SL4F")
         del self._sl4f
 
     @property
     def ssh(self):
         """Get the SSH provider module configured for this device."""
-        if not hasattr(self, '_ssh'):
+        if not hasattr(self, "_ssh"):
             if not self.ssh_port:
                 raise FuchsiaConfigError(
-                    'Must provide "ssh_port: <int>" in the device config')
+                    'Must provide "ssh_port: <int>" in the device config'
+                )
             if not self.ssh_priv_key:
                 raise FuchsiaConfigError(
                     'Must provide "ssh_priv_key: <file path>" in the device config'
                 )
-            self._ssh = SSHProvider(
-                SSHConfig(self.ip, self.ssh_priv_key, port=self.ssh_port))
+            self._ssh = FuchsiaSSHProvider(
+                SSHConfig(
+                    self.ssh_username,
+                    self.ip,
+                    self.ssh_priv_key,
+                    port=self.ssh_port,
+                    ssh_binary=self.ssh_binary_path,
+                )
+            )
         return self._ssh
 
     @ssh.deleter
     def ssh(self):
-        if not hasattr(self, '_ssh'):
+        if not hasattr(self, "_ssh"):
             return
-        self.log.debug('Cleaning up SSH')
+        self.log.debug("Cleaning up SSH")
         del self._ssh
 
     @property
@@ -336,20 +363,21 @@
         access again. Note re-initialization will interrupt any running ffx
         calls.
         """
-        if not hasattr(self, '_ffx'):
+        if not hasattr(self, "_ffx"):
             if not self.mdns_name:
                 raise FuchsiaConfigError(
                     'Must provide "mdns_name: <device mDNS name>" in the device config'
                 )
-            self._ffx = FFX(self.ffx_binary_path, self.mdns_name, self.ip,
-                            self.ssh_priv_key)
+            self._ffx = FFX(
+                self.ffx_binary_path, self.mdns_name, self.ip, self.ssh_priv_key
+            )
         return self._ffx
 
     @ffx.deleter
     def ffx(self):
-        if not hasattr(self, '_ffx'):
+        if not hasattr(self, "_ffx"):
             return
-        self.log.debug('Cleaning up ffx')
+        self.log.debug("Cleaning up ffx")
         self._ffx.clean_up()
         del self._ffx
 
@@ -359,7 +387,8 @@
         Args:
             file_path: Path to write the generated SSH config
         """
-        content = textwrap.dedent(f"""\
+        content = textwrap.dedent(
+            f"""\
             Host *
                 CheckHostIP no
                 StrictHostKeyChecking no
@@ -376,9 +405,10 @@
                 ServerAliveInterval 1
                 ServerAliveCountMax 1
                 LogLevel ERROR
-            """)
+            """
+        )
 
-        with open(file_path, 'w') as file:
+        with open(file_path, "w") as file:
             file.write(content)
 
     def init_controllers(self):
@@ -389,7 +419,7 @@
         self.wlan_controller = WlanController(self)
 
         # Contains WLAN policy functions like save_network, remove_network, etc
-        self.wlan_policy_controller = WlanPolicyController(self.sl4f, self.ffx)
+        self.wlan_policy_controller = WlanPolicyController(self.sl4f, self.ssh)
 
     def start_package_server(self):
         if not self.packages_archive_path:
@@ -399,7 +429,8 @@
                 "the DUT. If this is not the case, either run your own package "
                 "server, or configure these fields appropriately. "
                 "This is usually required for the Fuchsia iPerf3 client or "
-                "other testing utilities not on device cache.")
+                "other testing utilities not on device cache."
+            )
             return
         if self.package_server:
             self.log.warn(
@@ -427,16 +458,18 @@
         """
         for cmd_dict in cmd_dicts:
             try:
-                cmd = cmd_dict['cmd']
+                cmd = cmd_dict["cmd"]
             except KeyError:
                 raise FuchsiaDeviceError(
                     'To run a command via config, you must provide key "cmd" '
-                    'containing the command string.')
+                    "containing the command string."
+                )
 
-            timeout = cmd_dict.get('timeout', FUCHSIA_DEFAULT_COMMAND_TIMEOUT)
+            timeout = cmd_dict.get("timeout", FUCHSIA_DEFAULT_COMMAND_TIMEOUT)
             # Catch both boolean and string values from JSON
-            skip_status_code_check = 'true' == str(
-                cmd_dict.get('skip_status_code_check', False)).lower()
+            skip_status_code_check = (
+                "true" == str(cmd_dict.get("skip_status_code_check", False)).lower()
+            )
 
             if skip_status_code_check:
                 self.log.info(f'Running command "{cmd}" and ignoring result.')
@@ -446,23 +479,23 @@
             try:
                 result = self.ssh.run(cmd, timeout_sec=timeout)
                 self.log.debug(result)
-            except FuchsiaSSHError as e:
+            except SSHError as e:
                 if not skip_status_code_check:
                     raise FuchsiaDeviceError(
-                        'Failed device specific commands for initial configuration'
+                        "Failed device specific commands for initial configuration"
                     ) from e
 
-    def configure_wlan(self,
-                       association_mechanism=None,
-                       preserve_saved_networks=None):
+    def configure_wlan(
+        self, association_mechanism: str = None, preserve_saved_networks: bool = None
+    ) -> None:
         """
         Readies device for WLAN functionality. If applicable, connects to the
         policy layer and clears/saves preexisting saved networks.
 
         Args:
-            association_mechanism: string, 'policy' or 'drivers'. If None, uses
+            association_mechanism: either 'policy' or 'drivers'. If None, uses
                 the default value from init (can be set by ACTS config)
-            preserve_saved_networks: bool, whether to clear existing saved
+            preserve_saved_networks: whether to clear existing saved
                 networks, and preserve them for restoration later. If None, uses
                 the default value from init (can be set by ACTS config)
 
@@ -483,28 +516,32 @@
 
         if association_mechanism not in VALID_ASSOCIATION_MECHANISMS:
             raise FuchsiaDeviceError(
-                'Invalid FuchsiaDevice association_mechanism: %s' %
-                association_mechanism)
+                "Invalid FuchsiaDevice association_mechanism: %s"
+                % association_mechanism
+            )
 
         # Allows for wlan to be set up differently in different tests
         if self.association_mechanism:
-            self.log.info('Deconfiguring WLAN')
+            self.log.info("Deconfiguring WLAN")
             self.deconfigure_wlan()
 
         self.association_mechanism = association_mechanism
 
-        self.log.info('Configuring WLAN w/ association mechanism: %s' %
-                      association_mechanism)
-        if association_mechanism == 'drivers':
+        self.log.info(
+            f"Configuring WLAN w/ association mechanism: {association_mechanism}"
+        )
+        if association_mechanism == "drivers":
             self.log.warn(
-                'You may encounter unusual device behavior when using the '
-                'drivers directly for WLAN. This should be reserved for '
-                'debugging specific issues. Normal test runs should use the '
-                'policy layer.')
+                "You may encounter unusual device behavior when using the "
+                "drivers directly for WLAN. This should be reserved for "
+                "debugging specific issues. Normal test runs should use the "
+                "policy layer."
+            )
             if preserve_saved_networks:
                 self.log.warn(
-                    'Unable to preserve saved networks when using drivers '
-                    'association mechanism (requires policy layer control).')
+                    "Unable to preserve saved networks when using drivers "
+                    "association mechanism (requires policy layer control)."
+                )
         else:
             # This requires SL4F calls, so it can only happen with actual
             # devices, not with unit tests.
@@ -524,22 +561,21 @@
             FuchsiaDeviveError, if deconfigure fails.
         """
         if not self.association_mechanism:
-            self.log.debug(
-                'WLAN not configured before deconfigure was called.')
+            self.log.debug("WLAN not configured before deconfigure was called.")
             return
         # If using policy, stop client connections. Otherwise, just clear
         # variables.
-        if self.association_mechanism != 'drivers':
+        if self.association_mechanism != "drivers":
             self.wlan_policy_controller._deconfigure_wlan()
         self.association_mechanism = None
 
-    def reboot(self,
-               use_ssh: bool = False,
-               unreachable_timeout: int = FUCHSIA_DEFAULT_CONNECT_TIMEOUT,
-               ping_timeout: int = FUCHSIA_DEFAULT_CONNECT_TIMEOUT,
-               ssh_timeout: int = FUCHSIA_DEFAULT_CONNECT_TIMEOUT,
-               reboot_type: int = FUCHSIA_REBOOT_TYPE_SOFT,
-               testbed_pdus: List[pdu.PduDevice] = None) -> None:
+    def reboot(
+        self,
+        use_ssh: bool = False,
+        unreachable_timeout: int = FUCHSIA_DEFAULT_CONNECT_TIMEOUT,
+        reboot_type: str = FUCHSIA_REBOOT_TYPE_SOFT,
+        testbed_pdus: List[pdu.PduDevice] = None,
+    ) -> None:
         """Reboot a FuchsiaDevice.
 
         Soft reboots the device, verifies it becomes unreachable, then verifies
@@ -549,8 +585,6 @@
             use_ssh: if True, use fuchsia shell command via ssh to reboot
                 instead of SL4F.
             unreachable_timeout: time to wait for device to become unreachable.
-            ping_timeout:time to wait for device to respond to pings.
-            ssh_timeout: time to wait for device to be reachable via ssh.
             reboot_type: 'soft', 'hard' or 'flash'.
             testbed_pdus: all testbed PDUs.
 
@@ -560,57 +594,43 @@
         """
         if reboot_type == FUCHSIA_REBOOT_TYPE_SOFT:
             if use_ssh:
-                self.log.info('Soft rebooting via SSH')
+                self.log.info("Soft rebooting via SSH")
                 try:
                     self.ssh.run(
-                        'dm reboot',
-                        timeout_sec=FUCHSIA_RECONNECT_AFTER_REBOOT_TIME)
-                except FuchsiaSSHError as e:
-                    if 'closed by remote host' not in e.result.stderr:
+                        "dm reboot", timeout_sec=FUCHSIA_RECONNECT_AFTER_REBOOT_TIME
+                    )
+                except SSHError as e:
+                    if "closed by remote host" not in e.result.stderr:
                         raise e
             else:
-                self.log.info('Soft rebooting via SL4F')
-                self.sl4f.hardware_power_statecontrol_lib.suspendReboot(
-                    timeout=3)
-            self._check_unreachable(timeout_sec=unreachable_timeout)
+                self.log.info("Soft rebooting via SL4F")
+                self.sl4f.hardware_power_statecontrol_lib.suspendReboot(timeout=3)
+            self.ssh.wait_until_unreachable(timeout_sec=unreachable_timeout)
 
         elif reboot_type == FUCHSIA_REBOOT_TYPE_HARD:
-            self.log.info('Hard rebooting via PDU')
+            self.log.info("Hard rebooting via PDU")
             if not testbed_pdus:
-                raise AttributeError('Testbed PDUs must be supplied '
-                                     'to hard reboot a fuchsia_device.')
+                raise AttributeError(
+                    "Testbed PDUs must be supplied to hard reboot a fuchsia_device."
+                )
             device_pdu, device_pdu_port = pdu.get_pdu_port_for_device(
-                self.device_pdu_config, testbed_pdus)
-            self.log.info('Killing power to FuchsiaDevice')
+                self.device_pdu_config, testbed_pdus
+            )
+            self.log.info("Killing power to FuchsiaDevice")
             device_pdu.off(str(device_pdu_port))
-            self._check_unreachable(timeout_sec=unreachable_timeout)
-            self.log.info('Restoring power to FuchsiaDevice')
+            self.ssh.wait_until_unreachable(timeout_sec=unreachable_timeout)
+            self.log.info("Restoring power to FuchsiaDevice")
             device_pdu.on(str(device_pdu_port))
 
         elif reboot_type == FUCHSIA_REBOOT_TYPE_SOFT_AND_FLASH:
             flash(self, use_ssh, FUCHSIA_RECONNECT_AFTER_REBOOT_TIME)
 
         else:
-            raise ValueError('Invalid reboot type: %s' % reboot_type)
-
-        self._check_reachable(timeout_sec=ping_timeout)
+            raise ValueError("Invalid reboot type: %s" % reboot_type)
 
         # Cleanup services
         self.stop_services()
 
-        self.log.info('Waiting for device to allow ssh connection.')
-        end_time = time.time() + ssh_timeout
-        while time.time() < end_time:
-            try:
-                self.ssh.run('echo')
-            except Exception as e:
-                self.log.debug(f'Retrying SSH to device. Details: {e}')
-            else:
-                break
-        else:
-            raise ConnectionError('Failed to connect to device via SSH.')
-        self.log.info('Device now available via ssh.')
-
         # TODO (b/246852449): Move configure_wlan to other controllers.
         # If wlan was configured before reboot, it must be configured again
         # after rebooting, as it was before reboot. No preserving should occur.
@@ -620,49 +640,32 @@
             self.association_mechanism = None
             self.configure_wlan(
                 association_mechanism=pre_reboot_association_mechanism,
-                preserve_saved_networks=False)
-
-        self.log.info('Device has rebooted')
-
-    def version(self):
-        """Returns the version of Fuchsia running on the device.
-
-        Returns:
-            A string containing the Fuchsia version number or nothing if there
-            is no version information attached during the build.
-            For example, "5.20210713.2.1" or "".
-
-        Raises:
-            FFXTimeout: when the command times out.
-            FFXError: when the command returns non-zero and skip_status_code_check is False.
-        """
-        target_info_json = self.ffx.run("target show --json").stdout
-        target_info = json.loads(target_info_json)
-        build_info = [
-            entry for entry in target_info if entry["label"] == "build"
-        ]
-        if len(build_info) != 1:
-            self.log.warning(
-                f'Expected one entry with label "build", found {build_info}')
-            return ""
-        version_info = [
-            child for child in build_info[0]["child"]
-            if child["label"] == "version"
-        ]
-        if len(version_info) != 1:
-            self.log.warning(
-                f'Expected one entry child with label "version", found {build_info}'
+                preserve_saved_networks=False,
             )
-            return ""
-        return version_info[0]["value"]
 
-    def ping(self,
-             dest_ip,
-             count=3,
-             interval=1000,
-             timeout=1000,
-             size=25,
-             additional_ping_params=None):
+        self.log.info("Device has rebooted")
+
+    def version(self) -> str:
+        """Return the version of Fuchsia running on the device."""
+        return self.sl4f.device_lib.get_version()["result"]
+
+    def device_name(self) -> str:
+        """Return the name of the device."""
+        return self.sl4f.device_lib.get_device_name()["result"]
+
+    def product_name(self) -> str:
+        """Return the product name of the device."""
+        return self.sl4f.device_lib.get_product_name()["result"]
+
+    def ping(
+        self,
+        dest_ip,
+        count=3,
+        interval=1000,
+        timeout=1000,
+        size=25,
+        additional_ping_params=None,
+    ):
         """Pings from a Fuchsia device to an IPv4 address or hostname
 
         Args:
@@ -690,49 +693,54 @@
         rtt_avg = None
         self.log.debug("Pinging %s..." % dest_ip)
         if not additional_ping_params:
-            additional_ping_params = ''
+            additional_ping_params = ""
 
         try:
             ping_result = self.ssh.run(
-                f'ping -c {count} -i {interval} -t {timeout} -s {size} '
-                f'{additional_ping_params} {dest_ip}')
-        except FuchsiaSSHError as e:
+                f"ping -c {count} -i {interval} -t {timeout} -s {size} "
+                f"{additional_ping_params} {dest_ip}"
+            )
+        except SSHError as e:
             ping_result = e.result
 
         if ping_result.stderr:
             status = False
         else:
             status = True
-            rtt_line = ping_result.stdout.split('\n')[:-1]
+            rtt_line = ping_result.stdout.split("\n")[:-1]
             rtt_line = rtt_line[-1]
             rtt_stats = re.search(self.ping_rtt_match, rtt_line)
             rtt_min = rtt_stats.group(1)
             rtt_max = rtt_stats.group(2)
             rtt_avg = rtt_stats.group(3)
         return {
-            'status': status,
-            'rtt_min': rtt_min,
-            'rtt_max': rtt_max,
-            'rtt_avg': rtt_avg,
-            'stdout': ping_result.stdout,
-            'stderr': ping_result.stderr
+            "status": status,
+            "rtt_min": rtt_min,
+            "rtt_max": rtt_max,
+            "rtt_avg": rtt_avg,
+            "stdout": ping_result.stdout,
+            "stderr": ping_result.stderr,
         }
 
-    def can_ping(self,
-                 dest_ip,
-                 count=1,
-                 interval=1000,
-                 timeout=1000,
-                 size=25,
-                 additional_ping_params=None):
+    def can_ping(
+        self,
+        dest_ip,
+        count=1,
+        interval=1000,
+        timeout=1000,
+        size=25,
+        additional_ping_params=None,
+    ) -> bool:
         """Returns whether fuchsia device can ping a given dest address"""
-        ping_result = self.ping(dest_ip,
-                                count=count,
-                                interval=interval,
-                                timeout=timeout,
-                                size=size,
-                                additional_ping_params=additional_ping_params)
-        return ping_result['status']
+        ping_result = self.ping(
+            dest_ip,
+            count=count,
+            interval=interval,
+            timeout=timeout,
+            size=size,
+            additional_ping_params=additional_ping_params,
+        )
+        return ping_result["status"]
 
     def clean_up(self):
         """Cleans up the FuchsiaDevice object, releases any resources it
@@ -744,12 +752,11 @@
         thread will never join and will leave tests hanging.
         """
         # If and only if wlan is configured, and using the policy layer
-        if self.association_mechanism == 'policy':
+        if self.association_mechanism == "policy":
             try:
                 self.wlan_policy_controller.clean_up()
             except Exception as err:
-                self.log.warning('Unable to clean up WLAN Policy layer: %s' %
-                                 err)
+                self.log.warning("Unable to clean up WLAN Policy layer: %s" % err)
 
         self.stop_services()
 
@@ -771,23 +778,25 @@
             timeout.
         """
         self.log.info(
-            f'Checking for valid ipv4 addr. Retry {IP_ADDRESS_TIMEOUT} seconds.'
+            f"Checking for valid ipv4 addr. Retry {IP_ADDRESS_TIMEOUT} seconds."
         )
         timeout = time.time() + IP_ADDRESS_TIMEOUT
         while time.time() < timeout:
             ip_addrs = self.get_interface_ip_addresses(interface)
 
-            if len(ip_addrs['ipv4_private']) > 0:
-                self.log.info("Device has an ipv4 address: "
-                              f"{ip_addrs['ipv4_private'][0]}")
+            if len(ip_addrs["ipv4_private"]) > 0:
+                self.log.info(
+                    f"Device has an ipv4 address: {ip_addrs['ipv4_private'][0]}"
+                )
                 break
             else:
                 self.log.debug(
-                    'Device does not yet have an ipv4 address...retrying in 1 '
-                    'second.')
+                    "Device does not yet have an ipv4 address...retrying in 1 "
+                    "second."
+                )
                 time.sleep(1)
         else:
-            raise ConnectionError('Device failed to get an ipv4 address.')
+            raise ConnectionError("Device failed to get an ipv4 address.")
 
     def wait_for_ipv6_addr(self, interface: str) -> None:
         """Checks if device has an ipv6 private local address. Sleeps 1 second
@@ -801,58 +810,25 @@
             timeout.
         """
         self.log.info(
-            f'Checking for valid ipv6 addr. Retry {IP_ADDRESS_TIMEOUT} seconds.'
+            f"Checking for valid ipv6 addr. Retry {IP_ADDRESS_TIMEOUT} seconds."
         )
         timeout = time.time() + IP_ADDRESS_TIMEOUT
         while time.time() < timeout:
             ip_addrs = self.get_interface_ip_addresses(interface)
-            if len(ip_addrs['ipv6_private_local']) > 0:
-                self.log.info("Device has an ipv6 private local address: "
-                              f"{ip_addrs['ipv6_private_local'][0]}")
+            if len(ip_addrs["ipv6_private_local"]) > 0:
+                self.log.info(
+                    "Device has an ipv6 private local address: "
+                    f"{ip_addrs['ipv6_private_local'][0]}"
+                )
                 break
             else:
                 self.log.debug(
-                    'Device does not yet have an ipv6 address...retrying in 1 '
-                    'second.')
+                    "Device does not yet have an ipv6 address...retrying in 1 "
+                    "second."
+                )
                 time.sleep(1)
         else:
-            raise ConnectionError('Device failed to get an ipv6 address.')
-
-    def _check_reachable(self,
-                         timeout_sec: int = FUCHSIA_DEFAULT_CONNECT_TIMEOUT
-                         ) -> None:
-        """Checks the reachability of the Fuchsia device."""
-        end_time = time.time() + timeout_sec
-        self.log.info('Verifying device is reachable.')
-        while time.time() < end_time:
-            # TODO (b/249343632): Consolidate ping commands and fix timeout in
-            # utils.can_ping.
-            if utils.can_ping(job, self.ip):
-                self.log.info('Device is reachable.')
-                break
-            else:
-                self.log.debug(
-                    'Device is not reachable. Retrying in 1 second.')
-                time.sleep(1)
-        else:
-            raise ConnectionError('Device is unreachable.')
-
-    def _check_unreachable(self,
-                           timeout_sec: int = FUCHSIA_DEFAULT_CONNECT_TIMEOUT
-                           ) -> None:
-        """Checks the Fuchsia device becomes unreachable."""
-        end_time = time.time() + timeout_sec
-        self.log.info('Verifying device is unreachable.')
-        while (time.time() < end_time):
-            if utils.can_ping(job, self.ip):
-                self.log.debug(
-                    'Device is still reachable. Retrying in 1 second.')
-                time.sleep(1)
-            else:
-                self.log.info('Device is not reachable.')
-                break
-        else:
-            raise ConnectionError('Device failed to become unreachable.')
+            raise ConnectionError("Device failed to get an ipv6 address.")
 
     def check_connect_response(self, connect_response):
         if connect_response.get("error") is None:
@@ -869,8 +845,10 @@
                 return True
         else:
             # the response indicates an error - log and raise failure
-            self.log.debug("Aborting! - Connect call failed with error: %s" %
-                           connect_response.get("error"))
+            self.log.debug(
+                "Aborting! - Connect call failed with error: %s"
+                % connect_response.get("error")
+            )
             return False
 
     def check_disconnect_response(self, disconnect_response):
@@ -879,8 +857,10 @@
             return True
         else:
             # the response indicates an error - log and raise failure
-            self.log.debug("Disconnect call failed with error: %s" %
-                           disconnect_response.get("error"))
+            self.log.debug(
+                "Disconnect call failed with error: %s"
+                % disconnect_response.get("error")
+            )
             return False
 
     # TODO(fxb/64657): Determine more stable solution to country code config on
@@ -894,41 +874,44 @@
         # Country code can be None, from antlion config.
         if desired_country_code:
             desired_country_code = desired_country_code.upper()
-            response = self.sl4f.regulatory_region_lib.setRegion(
-                desired_country_code)
-            if response.get('error'):
+            response = self.sl4f.regulatory_region_lib.setRegion(desired_country_code)
+            if response.get("error"):
                 raise FuchsiaDeviceError(
-                    'Failed to set regulatory domain. Err: %s' %
-                    response['error'])
+                    "Failed to set regulatory domain. Err: %s" % response["error"]
+                )
 
             phy_list_response = self.sl4f.wlan_lib.wlanPhyIdList()
-            if phy_list_response.get('error'):
+            if phy_list_response.get("error"):
                 raise FuchsiaDeviceError(
-                    f'Failed to get phy list. Err: {response["error"]}')
-            phy_list = phy_list_response.get('result')
+                    f'Failed to get phy list. Err: {phy_list_response["error"]}'
+                )
+            phy_list = phy_list_response.get("result")
             if not phy_list:
-                raise FuchsiaDeviceError('No phy available in phy list')
+                raise FuchsiaDeviceError("No phy available in phy list")
             phy_id = phy_list[0]
 
             end_time = time.time() + FUCHSIA_COUNTRY_CODE_TIMEOUT
             while time.time() < end_time:
-                ascii_cc = self.sl4f.wlan_lib.wlanGetCountry(phy_id).get(
-                    'result')
+                ascii_cc = self.sl4f.wlan_lib.wlanGetCountry(phy_id).get("result")
                 # Convert ascii_cc to string, then compare
-                if ascii_cc and (''.join(chr(c) for c in ascii_cc).upper()
-                                 == desired_country_code):
-                    self.log.debug('Country code successfully set to %s.' %
-                                   desired_country_code)
+                if ascii_cc and (
+                    "".join(chr(c) for c in ascii_cc).upper() == desired_country_code
+                ):
+                    self.log.debug(
+                        "Country code successfully set to %s." % desired_country_code
+                    )
                     return
-                self.log.debug('Country code not yet updated. Retrying.')
+                self.log.debug("Country code not yet updated. Retrying.")
                 time.sleep(1)
-            raise FuchsiaDeviceError('Country code never updated to %s' %
-                                     desired_country_code)
+            raise FuchsiaDeviceError(
+                "Country code never updated to %s" % desired_country_code
+            )
 
     def stop_services(self):
-        """Stops the ffx daemon and deletes SL4F property."""
-        self.log.info('Stopping host device services.')
+        """Stops ffx daemon, deletes SSH property, and deletes SL4F property."""
+        self.log.info("Stopping host device services.")
         del self.sl4f
+        del self.ssh
         del self.ffx
 
     def load_config(self, config):
@@ -947,24 +930,25 @@
                 specified, the current time will be used.
         """
         if test_name:
-            self.log.info(
-                f"Taking snapshot of {self.mdns_name} for {test_name}")
+            self.log.info(f"Taking snapshot of {self.mdns_name} for {test_name}")
         else:
             self.log.info(f"Taking snapshot of {self.mdns_name}")
 
         epoch = begin_time if begin_time else utils.get_current_epoch_time()
         time_stamp = acts_logger.normalize_log_line_timestamp(
-            acts_logger.epoch_to_log_line_timestamp(epoch))
+            acts_logger.epoch_to_log_line_timestamp(epoch)
+        )
         out_dir = context.get_current_context().get_full_output_path()
-        out_path = os.path.join(out_dir, f'{self.mdns_name}_{time_stamp}.zip')
+        out_path = os.path.join(out_dir, f"{self.mdns_name}_{time_stamp}.zip")
 
         try:
             subprocess.run(
                 [f"ssh -F {self.ssh_config} {self.ip} snapshot > {out_path}"],
-                shell=True)
-            self.log.info(f'Snapshot saved to {out_path}')
+                shell=True,
+            )
+            self.log.info(f"Snapshot saved to {out_path}")
         except Exception as err:
-            self.log.error(f'Failed to take snapshot: {err}')
+            self.log.error(f"Failed to take snapshot: {err}")
 
     def take_bt_snoop_log(self, custom_name=None):
         """Takes a the bt-snoop log from the device and stores it in a file
@@ -972,16 +956,19 @@
         """
         bt_snoop_path = context.get_current_context().get_full_output_path()
         time_stamp = acts_logger.normalize_log_line_timestamp(
-            acts_logger.epoch_to_log_line_timestamp(time.time()))
+            acts_logger.epoch_to_log_line_timestamp(time.time())
+        )
         out_name = "FuchsiaDevice%s_%s" % (
-            self.serial, time_stamp.replace(" ", "_").replace(":", "-"))
+            self.serial,
+            time_stamp.replace(" ", "_").replace(":", "-"),
+        )
         out_name = "%s.pcap" % out_name
         if custom_name:
             out_name = "%s_%s.pcap" % (self.serial, custom_name)
         else:
             out_name = "%s.pcap" % out_name
         full_out_path = os.path.join(bt_snoop_path, out_name)
-        bt_snoop_data = self.ssh.run('bt-snoop-cli -d -f pcap').raw_stdout
-        bt_snoop_file = open(full_out_path, 'wb')
+        bt_snoop_data = self.ssh.run("bt-snoop-cli -d -f pcap").raw_stdout
+        bt_snoop_file = open(full_out_path, "wb")
         bt_snoop_file.write(bt_snoop_data)
         bt_snoop_file.close()
diff --git a/src/antlion/controllers/fuchsia_lib/audio_lib.py b/src/antlion/controllers/fuchsia_lib/audio_lib.py
deleted file mode 100644
index 02d974d..0000000
--- a/src/antlion/controllers/fuchsia_lib/audio_lib.py
+++ /dev/null
@@ -1,71 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion import logger
-from antlion.controllers.fuchsia_lib.base_lib import BaseLib
-
-import base64
-
-
-class FuchsiaAudioLib(BaseLib):
-
-    def __init__(self, addr: str) -> None:
-        super().__init__(addr, "audio")
-
-    def startOutputSave(self):
-        """Starts saving audio output on the device
-
-        Returns:
-            Dictionary is success, error if error.
-        """
-        test_cmd = "audio_facade.StartOutputSave"
-        test_args = {}
-
-        return self.send_command(test_cmd, test_args)
-
-    def stopOutputSave(self):
-        """Stops saving audio output on the device
-
-        Returns:
-            Dictionary is success, error if error.
-        """
-        test_cmd = "audio_facade.StopOutputSave"
-        test_args = {}
-
-        return self.send_command(test_cmd, test_args)
-
-    def getOutputAudio(self, save_path):
-        """Gets the saved audio in base64 encoding. Use base64.b64decode.
-
-        Args:
-            save_path: The path to save the raw audio
-
-        Returns:
-            True if success, False if error.
-        """
-        test_cmd = "audio_facade.GetOutputAudio"
-        test_args = {}
-
-        result = self.send_command(test_cmd, test_args)
-        if result.get("error") is not None:
-            self.log.error("Failed to get recorded audio.")
-            return False
-
-        f = open(save_path, "wb")
-        f.write(base64.b64decode(result.get('result')))
-        f.close()
-        self.log.info("Raw audio file captured at {}".format(save_path))
-        return True
diff --git a/src/antlion/controllers/fuchsia_lib/base_lib.py b/src/antlion/controllers/fuchsia_lib/base_lib.py
index 42da2ea..ea7f96e 100644
--- a/src/antlion/controllers/fuchsia_lib/base_lib.py
+++ b/src/antlion/controllers/fuchsia_lib/base_lib.py
@@ -18,11 +18,9 @@
 import socket
 
 from typing import Any, Mapping
-from urllib.parse import urlparse
 from urllib.request import Request, urlopen
 
-from antlion import logger, utils
-from antlion.libs.proc import job
+from antlion import logger
 
 DEFAULT_SL4F_RESPONSE_TIMEOUT_SEC = 30
 
@@ -35,17 +33,18 @@
     """A SL4F command to the server failed."""
 
 
-class BaseLib():
-
+class BaseLib:
     def __init__(self, addr: str, logger_tag: str) -> None:
         self.address = addr
-        self.log = logger.create_tagged_trace_logger(f"SL4F | {self.address} | {logger_tag}")
+        self.log = logger.create_tagged_trace_logger(
+            f"SL4F | {self.address} | {logger_tag}"
+        )
 
     def send_command(
         self,
         cmd: str,
         args: Mapping[str, Any],
-        response_timeout: int = DEFAULT_SL4F_RESPONSE_TIMEOUT_SEC
+        response_timeout: int = DEFAULT_SL4F_RESPONSE_TIMEOUT_SEC,
     ) -> Mapping[str, Any]:
         """Builds and sends a JSON command to SL4F server.
 
@@ -67,30 +66,26 @@
             # currently used.
             "id": "",
             "method": cmd,
-            "params": args
+            "params": args,
         }
         data_json = json.dumps(data).encode("utf-8")
-        req = Request(self.address,
-                      data=data_json,
-                      headers={
-                          "Content-Type": "application/json; charset=utf-8",
-                          "Content-Length": len(data_json),
-                      })
+        req = Request(
+            self.address,
+            data=data_json,
+            headers={
+                "Content-Type": "application/json; charset=utf-8",
+                "Content-Length": len(data_json),
+            },
+        )
 
         self.log.debug(f'Sending request "{cmd}" with {args}')
         try:
             response = urlopen(req, timeout=response_timeout)
-        except (TimeoutError, socket.timeout) as e:
-            host = urlparse(self.address).hostname
-            if not utils.can_ping(job, host):
-                raise DeviceOffline(
-                    f'FuchsiaDevice {host} is not reachable via the network.')
-            if type(e) == socket.timeout:
-                # socket.timeout was aliased to TimeoutError in Python 3.10. For
-                # older versions of Python, we need to cast to TimeoutError to
-                # provide a version-agnostic API.
-                raise TimeoutError("socket timeout") from e
-            raise e
+        except socket.timeout as e:
+            # socket.timeout was aliased to TimeoutError in Python 3.10. For
+            # older versions of Python, we need to cast to TimeoutError to
+            # provide a version-agnostic API.
+            raise TimeoutError("socket timeout") from e
 
         response_body = response.read().decode("utf-8")
         try:
diff --git a/src/antlion/controllers/fuchsia_lib/basemgr_lib.py b/src/antlion/controllers/fuchsia_lib/basemgr_lib.py
deleted file mode 100644
index 9e50e1e..0000000
--- a/src/antlion/controllers/fuchsia_lib/basemgr_lib.py
+++ /dev/null
@@ -1,63 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.controllers.fuchsia_lib.base_lib import BaseLib
-
-COMMAND_RESTART_SESSION = 'basemgr_facade.RestartSession'
-COMMAND_START_BASEMGR = 'basemgr_facade.StartBasemgr'
-COMMAND_KILL_BASEMGR = 'basemgr_facade.KillBasemgr'
-
-
-class FuchsiaBasemgrLib(BaseLib):
-
-    def __init__(self, addr: str) -> None:
-        super().__init__(addr, "basemgr")
-
-    def restartSession(self):
-        """Restarts an ongoing basemgr session
-
-        Returns:
-            Dictionary:
-                error: None, unless an error occurs
-                result: 'Success', 'NoSessionToRestart', or None if error
-        """
-        test_cmd = COMMAND_RESTART_SESSION
-
-        return self.send_command(test_cmd, {})
-
-    def startBasemgr(self):
-        """Starts basemgr service
-
-        Returns:
-            Dictionary:
-                error: None, unless an error occurs
-                result: 'Success' or None if error
-        """
-        test_cmd = COMMAND_START_BASEMGR
-
-        return self.send_command(test_cmd, {})
-
-    def killBasemgr(self):
-        """Kill basemgr service, if one is running
-
-        Returns:
-            Dictionary:
-                error: None, unless an error occurs
-                result: 'Success', 'NoBasemgrToKill', or None if error
-        """
-        test_cmd = COMMAND_KILL_BASEMGR
-
-        return self.send_command(test_cmd, {})
diff --git a/src/antlion/controllers/fuchsia_lib/bt/__init__.py b/src/antlion/controllers/fuchsia_lib/bt/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/controllers/fuchsia_lib/bt/__init__.py
+++ /dev/null
diff --git a/src/antlion/controllers/fuchsia_lib/bt/avdtp_lib.py b/src/antlion/controllers/fuchsia_lib/bt/avdtp_lib.py
deleted file mode 100644
index 0af61d2..0000000
--- a/src/antlion/controllers/fuchsia_lib/bt/avdtp_lib.py
+++ /dev/null
@@ -1,229 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.controllers.fuchsia_lib.base_lib import BaseLib
-
-
-class FuchsiaAvdtpLib(BaseLib):
-
-    def __init__(self, addr: str) -> None:
-        super().__init__(addr, "avdtp")
-
-    def init(self, initiator_delay=None):
-        """Initializes the AVDTP service with optional initiator_delay.
-
-        Args:
-            initiator_delay: Optional. The delay in milliseconds to start a
-            stream.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "avdtp_facade.AvdtpInit"
-        test_args = {"initiator_delay": initiator_delay}
-
-        return self.send_command(test_cmd, test_args)
-
-    def getConnectedPeers(self):
-        """Gets the AVDTP connected peers.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "avdtp_facade.AvdtpGetConnectedPeers"
-        test_args = {}
-
-        return self.send_command(test_cmd, test_args)
-
-    def setConfiguration(self, peer_id):
-        """Sends the AVDTP command to input peer_id: set configuration
-
-        Args:
-            peer_id: The peer id to send the AVDTP command to.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "avdtp_facade.AvdtpSetConfiguration"
-        test_args = {"identifier": peer_id}
-
-        return self.send_command(test_cmd, test_args)
-
-    def getConfiguration(self, peer_id):
-        """Sends the AVDTP command to input peer_id: get configuration
-
-        Args:
-            peer_id: The peer id to send the AVDTP command to.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "avdtp_facade.AvdtpGetConfiguration"
-        test_args = {"identifier": peer_id}
-
-        return self.send_command(test_cmd, test_args)
-
-    def getCapabilities(self, peer_id):
-        """Sends the AVDTP command to input peer_id: get capabilities
-
-        Args:
-            peer_id: The peer id to send the AVDTP command to.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "avdtp_facade.AvdtpGetCapabilities"
-        test_args = {"identifier": peer_id}
-
-        return self.send_command(test_cmd, test_args)
-
-    def getAllCapabilities(self, peer_id):
-        """Sends the AVDTP command to input peer_id: get all capabilities
-
-        Args:
-            peer_id: The peer id to send the AVDTP command to.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "avdtp_facade.AvdtpGetAllCapabilities"
-        test_args = {"identifier": peer_id}
-
-        return self.send_command(test_cmd, test_args)
-
-    def reconfigureStream(self, peer_id):
-        """Sends the AVDTP command to input peer_id: reconfigure stream
-
-        Args:
-            peer_id: The peer id to send the AVDTP command to.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "avdtp_facade.AvdtpReconfigureStream"
-        test_args = {"identifier": peer_id}
-
-        return self.send_command(test_cmd, test_args)
-
-    def suspendStream(self, peer_id):
-        """Sends the AVDTP command to input peer_id: suspend stream
-        Args:
-            peer_id: The peer id to send the AVDTP command to.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "avdtp_facade.AvdtpSuspendStream"
-        test_args = {"identifier": peer_id}
-
-        return self.send_command(test_cmd, test_args)
-
-    def suspendAndReconfigure(self, peer_id):
-        """Sends the AVDTP command to input peer_id: suspend and reconfigure
-
-        Args:
-            peer_id: The peer id to send the AVDTP command to.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "avdtp_facade.AvdtpSuspendAndReconfigure"
-        test_args = {"identifier": peer_id}
-
-        return self.send_command(test_cmd, test_args)
-
-    def releaseStream(self, peer_id):
-        """Sends the AVDTP command to input peer_id: release stream
-
-        Args:
-            peer_id: The peer id to send the AVDTP command to.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "avdtp_facade.AvdtpReleaseStream"
-        test_args = {"identifier": peer_id}
-
-        return self.send_command(test_cmd, test_args)
-
-    def establishStream(self, peer_id):
-        """Sends the AVDTP command to input peer_id: establish stream
-
-        Args:
-            peer_id: The peer id to send the AVDTP command to.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "avdtp_facade.AvdtpEstablishStream"
-        test_args = {"identifier": peer_id}
-
-        return self.send_command(test_cmd, test_args)
-
-    def startStream(self, peer_id):
-        """Sends the AVDTP command to input peer_id: start stream
-
-        Args:
-            peer_id: The peer id to send the AVDTP command to.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "avdtp_facade.AvdtpStartStream"
-        test_args = {"identifier": peer_id}
-
-        return self.send_command(test_cmd, test_args)
-
-    def abortStream(self, peer_id):
-        """Sends the AVDTP command to input peer_id: abort stream
-
-        Args:
-            peer_id: The peer id to send the AVDTP command to.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "avdtp_facade.AvdtpAbortStream"
-        test_args = {"identifier": peer_id}
-        test_id = self.build_id(self.test_counter)
-        self.test_counter += 1
-
-        return self.send_command(test_cmd, test_args)
-
-    def establishStream(self, peer_id):
-        """Sends the AVDTP command to input peer_id: establish stream
-
-        Args:
-            peer_id: The peer id to send the AVDTP command to.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "avdtp_facade.AvdtpEstablishStream"
-        test_args = {"identifier": peer_id}
-
-        return self.send_command(test_cmd, test_args)
-
-    def removeService(self):
-        """Removes the AVDTP service from the Fuchsia device
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "avdtp_facade.AvdtpRemoveService"
-        test_args = {}
-
-        return self.send_command(test_cmd, test_args)
diff --git a/src/antlion/controllers/fuchsia_lib/bt/ble_lib.py b/src/antlion/controllers/fuchsia_lib/bt/ble_lib.py
deleted file mode 100644
index 1d7e622..0000000
--- a/src/antlion/controllers/fuchsia_lib/bt/ble_lib.py
+++ /dev/null
@@ -1,133 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.controllers.fuchsia_lib.base_lib import BaseLib
-
-import uuid
-
-
-class FuchsiaBleLib(BaseLib):
-
-    def __init__(self, addr: str) -> None:
-        super().__init__(addr, "ble")
-
-    def _convert_human_readable_uuid_to_byte_list(self, readable_uuid):
-        """Converts a readable uuid to a byte list.
-
-        Args:
-            readable_uuid: string, A readable uuid in the format:
-                Input: "00001101-0000-1000-8000-00805f9b34fb"
-                Output: ['fb', '34', '9b', '5f', '80', '00', '00', '80', '00',
-                         '10', '00', '00', '01', '11', '00', '00']
-
-        Returns:
-            A byte list representing the readable uuid.
-        """
-        hex_uuid_str = uuid.UUID(readable_uuid).hex
-        break_n_bytes = 2
-        byte_list = [
-            hex_uuid_str[i:i + break_n_bytes]
-            for i in range(0, len(hex_uuid_str), break_n_bytes)
-        ]
-        byte_list.reverse()
-        return byte_list
-
-    def bleStopBleAdvertising(self):
-        """BleStopAdvertising command
-
-        Returns:
-            Dictionary, None if success, error string if error.
-        """
-        test_cmd = "ble_advertise_facade.BleStopAdvertise"
-        test_args = {}
-
-        return self.send_command(test_cmd, test_args)
-
-    def bleStartBleAdvertising(self,
-                               advertising_data,
-                               scan_response,
-                               interval,
-                               connectable=True):
-        """BleStartAdvertising command
-
-        Args:
-            advertising_data: dictionary, advertising data required for ble
-                advertise.
-            scan_response: dictionary, optional scan respones data to send.
-            interval: int, Advertising interval (in ms).
-            connectable: bool, whether the advertisement is connectable or not.
-
-        Returns:
-            Dictionary, None if success, error string if error.
-        """
-        test_cmd = "ble_advertise_facade.BleAdvertise"
-        service_uuid_list = None
-        if type(advertising_data['service_uuids']) == list:
-            service_uuid_list = []
-            for single_uuid in advertising_data['service_uuids']:
-                service_uuid_list.append(
-                    self._convert_human_readable_uuid_to_byte_list(
-                        single_uuid))
-            advertising_data['service_uuids'] = service_uuid_list
-
-        service_uuid_list = None
-        if scan_response and type(scan_response['service_uuids']) == list:
-            service_uuid_list = []
-            for single_uuid in scan_response['service_uuids']:
-                service_uuid_list.append(
-                    self._convert_human_readable_uuid_to_byte_list(
-                        single_uuid))
-            scan_response['service_uuids'] = service_uuid_list
-
-        if scan_response and type(scan_response['service_data']) == list:
-            for service_data in scan_response['service_data']:
-                service_data[
-                    "uuid"] = self._convert_human_readable_uuid_to_byte_list(
-                        service_data["uuid"])
-
-        if type(advertising_data['service_data']) == list:
-            for service_data in advertising_data['service_data']:
-                service_data[
-                    "uuid"] = self._convert_human_readable_uuid_to_byte_list(
-                        service_data["uuid"])
-
-        test_args = {
-            "advertising_data": advertising_data,
-            "scan_response": scan_response,
-            "interval_ms": interval,
-            "connectable": connectable
-        }
-        return self.send_command(test_cmd, test_args)
-
-    def blePublishService(self, primary, type_, service_id):
-        """Publishes services specified by input args
-
-        Args:
-            primary: bool, Flag of service.
-            type: string, Canonical 8-4-4-4-12 uuid of service.
-            service_proxy_key: string, Unique identifier to specify where to publish service
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "bluetooth.BlePublishService"
-        test_args = {
-            "primary": primary,
-            "type": type_,
-            "local_service_id": service_id
-        }
-
-        return self.send_command(test_cmd, test_args)
diff --git a/src/antlion/controllers/fuchsia_lib/bt/bts_lib.py b/src/antlion/controllers/fuchsia_lib/bt/bts_lib.py
deleted file mode 100644
index 6a94c6b..0000000
--- a/src/antlion/controllers/fuchsia_lib/bt/bts_lib.py
+++ /dev/null
@@ -1,227 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.controllers.fuchsia_lib.base_lib import BaseLib
-
-
-class FuchsiaBtsLib(BaseLib):
-    # Class representing the Bluetooth Access Library.
-
-    def __init__(self, addr: str) -> None:
-        super().__init__(addr, "bt_sys")
-
-    def setDiscoverable(self, discoverable):
-        """Sets the device to be discoverable over BR/EDR.
-
-        Args:
-            discoverable: A bool object for setting Bluetooth
-              device discoverable or not.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "bt_sys_facade.BluetoothSetDiscoverable"
-        test_args = {"discoverable": discoverable}
-
-        return self.send_command(test_cmd, test_args)
-
-    def setName(self, name):
-        """Sets the local Bluetooth name of the device.
-
-        Args:
-            name: A string that represents the name to set.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "bt_sys_facade.BluetoothSetName"
-        test_args = {"name": name}
-
-        return self.send_command(test_cmd, test_args)
-
-    def inputPairingPin(self, pin):
-        """Inputs the pairing pin to the Fuchsia devices' pairing delegate.
-
-        Args:
-            pin: A string that represents the pin to input.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "bt_sys_facade.BluetoothInputPairingPin"
-        test_args = {"pin": pin}
-
-        return self.send_command(test_cmd, test_args)
-
-    def getPairingPin(self):
-        """Gets the pairing pin from the Fuchsia devices' pairing delegate.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "bt_sys_facade.BluetoothGetPairingPin"
-        test_args = {}
-
-        return self.send_command(test_cmd, test_args)
-
-    def initBluetoothSys(self):
-        """Initialises the Bluetooth sys Interface proxy in SL4F.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "bt_sys_facade.BluetoothInitSys"
-        test_args = {}
-
-        return self.send_command(test_cmd, test_args)
-
-    def requestDiscovery(self, discovery):
-        """Start or stop Bluetooth Control device discovery.
-
-        Args:
-            discovery: A bool object representing starting or stopping
-              device discovery.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "bt_sys_facade.BluetoothRequestDiscovery"
-        test_args = {"discovery": discovery}
-
-        return self.send_command(test_cmd, test_args)
-
-    def getKnownRemoteDevices(self):
-        """Get known remote BR/EDR and LE devices.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "bt_sys_facade.BluetoothGetKnownRemoteDevices"
-        test_args = {}
-
-        return self.send_command(test_cmd, test_args)
-
-    def forgetDevice(self, identifier):
-        """Forgets a devices pairing.
-
-        Args:
-            identifier: A string representing the device id.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "bt_sys_facade.BluetoothForgetDevice"
-        test_args = {"identifier": identifier}
-
-        return self.send_command(test_cmd, test_args)
-
-    def disconnectDevice(self, identifier):
-        """Disconnects a devices.
-
-        Args:
-            identifier: A string representing the device id.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "bt_sys_facade.BluetoothDisconnectDevice"
-        test_args = {"identifier": identifier}
-
-        return self.send_command(test_cmd, test_args)
-
-    def connectDevice(self, identifier):
-        """Connects to a devices.
-
-        Args:
-            identifier: A string representing the device id.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "bt_sys_facade.BluetoothConnectDevice"
-        test_args = {"identifier": identifier}
-
-        return self.send_command(test_cmd, test_args)
-
-    def getActiveAdapterAddress(self):
-        """Gets the current Active Adapter's address.
-
-        Returns:
-            Dictionary, String address if success, error if error.
-        """
-        test_cmd = "bt_sys_facade.BluetoothGetActiveAdapterAddress"
-        test_args = {}
-
-        return self.send_command(test_cmd, test_args)
-
-    def pair(self, identifier, pairing_security_level, non_bondable,
-             transport):
-        """Pairs to a device.
-
-        Args:
-            identifier: A string representing the device id.
-            pairing_security_level: The security level required for this pairing request
-                represented as a u64. (Only for LE pairing)
-                Available Values
-                1 - ENCRYPTED: Encrypted without MITM protection (unauthenticated)
-                2 - AUTHENTICATED: Encrypted with MITM protection (authenticated).
-                None: No pairing security level.
-            non_bondable: A bool representing whether the pairing mode is bondable or not. None is
-                also accepted. False if bondable, True if non-bondable.
-            transport: A u64 representing the transport type.
-                Available Values
-                1 - BREDR: Classic BR/EDR transport
-                2 - LE: Bluetooth Low Energy Transport
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "bt_sys_facade.BluetoothPairDevice"
-        test_args = {
-            "identifier": identifier,
-            "pairing_security_level": pairing_security_level,
-            "non_bondable": non_bondable,
-            "transport": transport,
-        }
-
-        return self.send_command(test_cmd, test_args)
-
-    def acceptPairing(self,
-                      input_capabilities="NONE",
-                      output_capabilities="NONE"):
-        """Accepts incoming pairing requests.
-
-        Args:
-            input: String - The input I/O capabilities to use
-                Available Values:
-                NONE - Input capability type None
-                CONFIRMATION - Input capability type confirmation
-                KEYBOARD - Input capability type Keyboard
-            output: String - The output I/O Capabilities to use
-                Available Values:
-                NONE - Output capability type None
-                DISPLAY - output capability type Display
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "bt_sys_facade.BluetoothAcceptPairing"
-        test_args = {
-            "input": input_capabilities,
-            "output": output_capabilities,
-        }
-
-        return self.send_command(test_cmd, test_args)
diff --git a/src/antlion/controllers/fuchsia_lib/bt/gattc_lib.py b/src/antlion/controllers/fuchsia_lib/bt/gattc_lib.py
deleted file mode 100644
index b8630d9..0000000
--- a/src/antlion/controllers/fuchsia_lib/bt/gattc_lib.py
+++ /dev/null
@@ -1,350 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.controllers.fuchsia_lib.base_lib import BaseLib
-
-
-class FuchsiaGattcLib(BaseLib):
-
-    def __init__(self, addr: str) -> None:
-        super().__init__(addr, "gatt_client")
-
-    def bleStartBleScan(self, scan_filter):
-        """Starts a BLE scan
-
-        Args:
-            scan_time_ms: int, Amount of time to scan for.
-            scan_filter: dictionary, Device filter for a scan.
-            scan_count: int, Number of devices to scan for before termination.
-
-        Returns:
-            None if pass, err if fail.
-        """
-        test_cmd = "gatt_client_facade.BleStartScan"
-        test_args = {
-            "filter": scan_filter,
-        }
-
-        return self.send_command(test_cmd, test_args)
-
-    def bleStopBleScan(self):
-        """Stops a BLE scan
-
-        Returns:
-            Dictionary, List of devices discovered, error string if error.
-        """
-        test_cmd = "gatt_client_facade.BleStopScan"
-        test_args = {}
-
-        return self.send_command(test_cmd, test_args)
-
-    def listServices(self, id):
-        """Lists services of a peripheral specified by id.
-
-        Args:
-            id: string, Peripheral identifier to list services.
-
-        Returns:
-            Dictionary, List of Service Info if success, error string if error.
-        """
-        test_cmd = "gatt_client_facade.GattcListServices"
-        test_args = {"identifier": id}
-
-        return self.send_command(test_cmd, test_args)
-
-    def bleGetDiscoveredDevices(self):
-        """Stops a BLE scan
-
-        Returns:
-            Dictionary, List of devices discovered, error string if error.
-        """
-        test_cmd = "gatt_client_facade.BleGetDiscoveredDevices"
-        test_args = {}
-
-        return self.send_command(test_cmd, test_args)
-
-    def discoverCharacteristics(self):
-        """Discover the characteristics of a connected service.
-
-        Returns:
-            Dictionary, List of Characteristics and Descriptors if success,
-            error string if error.
-        """
-        test_cmd = "gatt_client_facade.GattcDiscoverCharacteristics"
-        test_args = {}
-
-        return self.send_command(test_cmd, test_args)
-
-    def writeCharById(self, id, offset, write_value):
-        """Write Characteristic by id..
-
-        Args:
-            id: string, Characteristic identifier.
-            offset: int, The offset of bytes to write to.
-            write_value: byte array, The bytes to write.
-
-        Returns:
-            None if success, error string if error.
-        """
-        test_cmd = "gatt_client_facade.GattcWriteCharacteristicById"
-        test_args = {
-            "identifier": id,
-            "offset": offset,
-            "write_value": write_value,
-        }
-
-        return self.send_command(test_cmd, test_args)
-
-    def writeLongCharById(self, id, offset, write_value, reliable_mode=False):
-        """Write Characteristic by id.
-
-        Args:
-            id: string, Characteristic identifier.
-            offset: int, The offset of bytes to write to.
-            write_value: byte array, The bytes to write.
-            reliable_mode: bool value representing reliable writes.
-
-        Returns:
-            None if success, error string if error.
-        """
-        test_cmd = "gatt_client_facade.GattcWriteLongCharacteristicById"
-        test_args = {
-            "identifier": id,
-            "offset": offset,
-            "write_value": write_value,
-            "reliable_mode": reliable_mode
-        }
-
-        return self.send_command(test_cmd, test_args)
-
-    def writeLongDescById(self, id, offset, write_value):
-        """Write Descriptor by id.
-
-        Args:
-            id: string, Characteristic identifier.
-            offset: int, The offset of bytes to write to.
-            write_value: byte array, The bytes to write.
-
-        Returns:
-            None if success, error string if error.
-        """
-        test_cmd = "gatt_client_facade.GattcWriteLongDescriptorById"
-        test_args = {
-            "identifier": id,
-            "offset": offset,
-            "write_value": write_value,
-        }
-
-        return self.send_command(test_cmd, test_args)
-
-    def writeCharByIdWithoutResponse(self, id, write_value):
-        """Write Characteristic by id without response.
-
-        Args:
-            id: string, Characteristic identifier.
-            write_value: byte array, The bytes to write.
-
-        Returns:
-            None if success, error string if error.
-        """
-        test_cmd = "gatt_client_facade.GattcWriteCharacteristicByIdWithoutResponse"
-        test_args = {
-            "identifier": id,
-            "write_value": write_value,
-        }
-
-        return self.send_command(test_cmd, test_args)
-
-    def enableNotifyCharacteristic(self, id):
-        """Enable notifications on a Characteristic.
-
-        Args:
-            id: string, Characteristic identifier.
-
-        Returns:
-            None if success, error string if error.
-        """
-        test_cmd = "gatt_client_facade.GattcEnableNotifyCharacteristic"
-        test_args = {
-            "identifier": id,
-        }
-
-        return self.send_command(test_cmd, test_args)
-
-    def disableNotifyCharacteristic(self, id):
-        """Disable notifications on a Characteristic.
-
-        Args:
-            id: string, Characteristic identifier.
-
-        Returns:
-            None if success, error string if error.
-        """
-        test_cmd = "gatt_client_facade.GattcDisableNotifyCharacteristic"
-        test_args = {
-            "identifier": id,
-            "value": False,
-        }
-
-        return self.send_command(test_cmd, test_args)
-
-    def readCharacteristicById(self, id):
-        """Read Characteristic value by id..
-
-        Args:
-            id: string, Characteristic identifier.
-
-        Returns:
-            Characteristic value if success, error string if error.
-        """
-        test_cmd = "gatt_client_facade.GattcReadCharacteristicById"
-        test_args = {
-            "identifier": id,
-        }
-
-        return self.send_command(test_cmd, test_args)
-
-    def readCharacteristicByType(self, uuid):
-        """Read Characteristic value by id..
-
-        Args:
-            uuid: string, Characteristic identifier.
-
-        Returns:
-            Characteristic value if success, error string if error.
-        """
-        test_cmd = "gatt_client_facade.GattcReadCharacteristicByType"
-        test_args = {
-            "uuid": uuid,
-        }
-
-        return self.send_command(test_cmd, test_args)
-
-    def readDescriptorById(self, id):
-        """Read Descriptor value by id..
-
-        Args:
-            id: string, Descriptor identifier.
-
-        Returns:
-            Descriptor value if success, error string if error.
-        """
-        test_cmd = "gatt_client_facade.GattcReadDescriptorById"
-        test_args = {
-            "identifier": id,
-        }
-
-        return self.send_command(test_cmd, test_args)
-
-    def readLongDescriptorById(self, id, offset, max_bytes):
-        """Reads Long Descriptor value by id.
-
-        Args:
-            id: string, Descriptor identifier.
-            offset: int, The offset to start reading from.
-            max_bytes: int, The max bytes to return.
-
-        Returns:
-            Descriptor value if success, error string if error.
-        """
-        test_cmd = "gatt_client_facade.GattcReadLongDescriptorById"
-        test_args = {
-            "identifier": id,
-            "offset": offset,
-            "max_bytes": max_bytes
-        }
-
-        return self.send_command(test_cmd, test_args)
-
-    def writeDescriptorById(self, id, offset, write_value):
-        """Write Descriptor by id.
-
-        Args:
-            id: string, Descriptor identifier.
-            write_value: byte array, The bytes to write.
-
-        Returns:
-            None if success, error string if error.
-        """
-        test_cmd = "gatt_client_facade.GattcWriteDescriptorById"
-        test_args = {
-            "identifier": id,
-            "write_value": write_value,
-        }
-
-        return self.send_command(test_cmd, test_args)
-
-    def readLongCharacteristicById(self, id, offset, max_bytes):
-        """Reads Long Characteristic value by id.
-
-        Args:
-            id: string, Characteristic identifier.
-            offset: int, The offset to start reading from.
-            max_bytes: int, The max bytes to return.
-
-        Returns:
-            Characteristic value if success, error string if error.
-        """
-        test_cmd = "gatt_client_facade.GattcReadLongCharacteristicById"
-        test_args = {
-            "identifier": id,
-            "offset": offset,
-            "max_bytes": max_bytes
-        }
-
-        return self.send_command(test_cmd, test_args)
-
-    def connectToService(self, id, service_id):
-        """ Connect to a specific Service specified by id.
-
-        Args:
-            id: string, Service id.
-
-        Returns:
-            None if success, error string if error.
-        """
-        test_cmd = "gatt_client_facade.GattcConnectToService"
-        test_args = {"identifier": id, "service_identifier": service_id}
-
-        return self.send_command(test_cmd, test_args)
-
-    def bleConnectToPeripheral(self, id):
-        """Connects to a peripheral specified by id.
-
-        Args:
-            id: string, Peripheral identifier to connect to.
-
-        Returns:
-            Dictionary, List of Service Info if success, error string if error.
-        """
-        test_cmd = "gatt_client_facade.BleConnectPeripheral"
-        test_args = {"identifier": id}
-
-        return self.send_command(test_cmd, test_args)
-
-    def bleDisconnectPeripheral(self, id):
-        """Disconnects from a peripheral specified by id.
-
-        Args:
-            id: string, Peripheral identifier to disconnect from.
-
-        Returns:
-            Dictionary, None if success, error string if error.
-        """
-        test_cmd = "gatt_client_facade.BleDisconnectPeripheral"
-        test_args = {"identifier": id}
-
-        return self.send_command(test_cmd, test_args)
diff --git a/src/antlion/controllers/fuchsia_lib/bt/gatts_lib.py b/src/antlion/controllers/fuchsia_lib/bt/gatts_lib.py
deleted file mode 100644
index 5f9ecb4..0000000
--- a/src/antlion/controllers/fuchsia_lib/bt/gatts_lib.py
+++ /dev/null
@@ -1,51 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.controllers.fuchsia_lib.base_lib import BaseLib
-
-
-class FuchsiaGattsLib(BaseLib):
-
-    def __init__(self, addr: str) -> None:
-        super().__init__(addr, "gatt_server")
-
-    def publishServer(self, database):
-        """Publishes services specified by input args
-
-        Args:
-            database: A database that follows the conventions of
-                acts_contrib.test_utils.bt.gatt_test_database.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "gatt_server_facade.GattServerPublishServer"
-        test_args = {
-            "database": database,
-        }
-
-        return self.send_command(test_cmd, test_args)
-
-    def closeServer(self):
-        """Closes an active GATT server.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "gatt_server_facade.GattServerCloseServer"
-        test_args = {}
-
-        return self.send_command(test_cmd, test_args)
diff --git a/src/antlion/controllers/fuchsia_lib/bt/hfp_lib.py b/src/antlion/controllers/fuchsia_lib/bt/hfp_lib.py
deleted file mode 100644
index e8f68f3..0000000
--- a/src/antlion/controllers/fuchsia_lib/bt/hfp_lib.py
+++ /dev/null
@@ -1,420 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.controllers.fuchsia_lib.base_lib import BaseLib
-
-
-class FuchsiaHfpLib(BaseLib):
-
-    def __init__(self, addr: str) -> None:
-        super().__init__(addr, "hfp")
-
-    def init(self):
-        """Initializes the HFP service.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "hfp_facade.HfpInit"
-        test_args = {}
-
-        return self.send_command(test_cmd, test_args)
-
-    def removeService(self):
-        """Removes the HFP service from the Fuchsia device
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "hfp_facade.HfpRemoveService"
-        test_args = {}
-
-        return self.send_command(test_cmd, test_args)
-
-    def listPeers(self):
-        """List all connected HFP peer devices.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "hfp_facade.ListPeers"
-        test_args = {}
-
-        return self.send_command(test_cmd, test_args)
-
-    def setActivePeer(self, peer_id):
-        """Set the active HFP peer device. All peer specific commands will be
-        directed to this device.
-
-        Args:
-            peer_id: The id of the peer to set as active. Use "listPeers" to
-            find connected peer ids.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "hfp_facade.SetActivePeer"
-        test_args = {"peer_id": peer_id}
-
-        return self.send_command(test_cmd, test_args)
-
-    def listCalls(self):
-        """List all calls known to the sl4f component.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "hfp_facade.ListCalls"
-        test_args = {}
-
-        return self.send_command(test_cmd, test_args)
-
-    def newCall(self, remote, state, direction):
-        """Opens a new call channel and alerts the HFP peer.
-
-        Args:
-            remote: The number of the remote party.
-            state: The state of the call.
-            direction: The direction of the call. Can be "incoming" or "outgoing".
-
-        Returns:
-            Dictionary, call_id if success, error if error.
-        """
-        test_cmd = "hfp_facade.NewCall"
-        test_args = {"remote": remote, "state": state, "direction": direction}
-
-        return self.send_command(test_cmd, test_args)
-
-    def initiateIncomingCall(self, remote):
-        """Opens an incoming call channel and alerts the HFP peer.
-
-        Args:
-            remote: The number of the remote party.
-
-        Returns:
-            Dictionary, call_id if success, error if error.
-        """
-        test_cmd = "hfp_facade.IncomingCall"
-        test_args = {"remote": remote}
-
-        return self.send_command(test_cmd, test_args)
-
-    def initiateIncomingWaitingCall(self, remote):
-        """Opens an incoming call when there is an onging call and alerts
-        the HFP peer.
-
-        Args:
-            remote: The number of the remote party.
-
-        Returns:
-            Dictionary, call_id if success, error if error.
-        """
-        test_cmd = "hfp_facade.IncomingWaitingCall"
-        test_args = {"remote": remote}
-
-        return self.send_command(test_cmd, test_args)
-
-    def initiateOutgoingCall(self, remote):
-        """Opens an outgoing call channel and alerts the HFP peer.
-
-        Args:
-            remote: The number of the remote party.
-
-        Returns:
-            Dictionary, call_id if success, error if error.
-        """
-        test_cmd = "hfp_facade.OutgoingCall"
-        test_args = {"remote": remote}
-
-        return self.send_command(test_cmd, test_args)
-
-    def setCallActive(self, call_id):
-        """Sets the specified call to the "OngoingActive" state.
-
-        Args:
-            call_id: The unique id of the call.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "hfp_facade.SetCallActive"
-        test_args = {"call_id": call_id}
-
-        return self.send_command(test_cmd, test_args)
-
-    def setCallHeld(self, call_id):
-        """Sets the specified call to the "OngoingHeld" state.
-
-        Args:
-            call_id: The unique id of the call.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "hfp_facade.SetCallHeld"
-        test_args = {"call_id": call_id}
-
-        return self.send_command(test_cmd, test_args)
-
-    def setCallTerminated(self, call_id):
-        """Sets the specified call to the "Terminated" state.
-
-        Args:
-            call_id: The unique id of the call.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "hfp_facade.SetCallTerminated"
-        test_args = {"call_id": call_id}
-
-        return self.send_command(test_cmd, test_args)
-
-    def setCallTransferredToAg(self, call_id):
-        """Sets the specified call to the "TransferredToAg" state.
-
-        Args:
-            call_id: The unique id of the call.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "hfp_facade.SetCallTransferredToAg"
-        test_args = {"call_id": call_id}
-
-        return self.send_command(test_cmd, test_args)
-
-    def setSpeakerGain(self, value):
-        """Sets the active peer's speaker gain.
-
-        Args:
-            value: The gain value to set. Must be between 0-15 inclusive.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "hfp_facade.SetSpeakerGain"
-        test_args = {"value": value}
-
-        return self.send_command(test_cmd, test_args)
-
-    def setMicrophoneGain(self, value):
-        """Sets the active peer's microphone gain.
-
-        Args:
-            value: The gain value to set. Must be between 0-15 inclusive.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "hfp_facade.SetMicrophoneGain"
-        test_args = {"value": value}
-
-        return self.send_command(test_cmd, test_args)
-
-    def setServiceAvailable(self, value):
-        """Sets the simulated network service status reported by the call manager.
-
-        Args:
-            value: True to set the network service to available.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "hfp_facade.SetServiceAvailable"
-        test_args = {"value": value}
-
-        return self.send_command(test_cmd, test_args)
-
-    def setRoaming(self, value):
-        """Sets the simulated roaming status reported by the call manager.
-
-        Args:
-            value: True to set the network connection to roaming.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "hfp_facade.SetRoaming"
-        test_args = {"value": value}
-
-        return self.send_command(test_cmd, test_args)
-
-    def setSignalStrength(self, value):
-        """Sets the simulated signal strength reported by the call manager.
-
-        Args:
-            value: The signal strength value to set. Must be between 0-5 inclusive.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "hfp_facade.SetSignalStrength"
-        test_args = {"value": value}
-
-        return self.send_command(test_cmd, test_args)
-
-    def setSubscriberNumber(self, value):
-        """Sets the subscriber number reported by the call manager.
-
-        Args:
-            value: The subscriber number to set. Maximum length 128 characters.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "hfp_facade.SetSubscriberNumber"
-        test_args = {"value": value}
-
-        return self.send_command(test_cmd, test_args)
-
-    def setOperator(self, value):
-        """Sets the operator value reported by the call manager.
-
-        Args:
-            value: The operator value to set. Maximum length 16 characters.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "hfp_facade.SetOperator"
-        test_args = {"value": value}
-
-        return self.send_command(test_cmd, test_args)
-
-    def setNrecSupport(self, value):
-        """Sets the noise reduction/echo cancelation support reported by the call manager.
-
-        Args:
-            value: The nrec support bool.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "hfp_facade.SetNrecSupport"
-        test_args = {"value": value}
-
-        return self.send_command(test_cmd, test_args)
-
-    def setBatteryLevel(self, value):
-        """Sets the battery level reported by the call manager.
-
-        Args:
-            value: The integer battery level value. Must be 0-5 inclusive.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "hfp_facade.SetBatteryLevel"
-        test_args = {"value": value}
-
-        return self.send_command(test_cmd, test_args)
-
-    def setLastDialed(self, number):
-        """Sets the last dialed number in the call manager.
-
-        Args:
-            number: The number of the remote party.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "hfp_facade.SetLastDialed"
-        test_args = {"number": number}
-
-        return self.send_command(test_cmd, test_args)
-
-    def clearLastDialed(self):
-        """Clears the last dialed number in the call manager.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "hfp_facade.ClearLastDialed"
-        test_args = {}
-
-        return self.send_command(test_cmd, test_args)
-
-    def setMemoryLocation(self, location, number):
-        """Sets a memory location to point to a remote number.
-
-        Args:
-            location: The memory location at which to store the number.
-            number: The number of the remote party to be stored.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "hfp_facade.SetMemoryLocation"
-        test_args = {"location": location, "number": number}
-
-        return self.send_command(test_cmd, test_args)
-
-    def clearMemoryLocation(self, location):
-        """Clear a memory location so that it no longer points to a remote
-        number.
-
-        Args:
-            localtion: The memory location to clear.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "hfp_facade.ClearMemoryLocation"
-        test_args = {"location": location}
-
-        return self.send_command(test_cmd, test_args)
-
-    def setDialResult(self, number, status):
-        """Sets the status result to be returned when the number is dialed.
-
-        Args:
-            number: The number of the remote party.
-            status: The status to be returned when an outgoing call is
-                    initiated to the number.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "hfp_facade.SetDialResult"
-        test_args = {"number": number, "status": status}
-
-        return self.send_command(test_cmd, test_args)
-
-    def getState(self):
-        """Get the call manager's state.
-
-        Returns:
-            Dictionary, State dictionary if success, error if error.
-        """
-        test_cmd = "hfp_facade.GetState"
-        test_args = {}
-
-        return self.send_command(test_cmd, test_args)
-
-    def setConnectionBehavior(self, autoconnect):
-        """Set the Service Level Connection behavior when a new peer connects.
-
-        Args:
-            autoconnect: Enable/Disable autoconnection of SLC.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "hfp_facade.SetConnectionBehavior"
-        test_args = {"autoconnect": autoconnect}
-
-        return self.send_command(test_cmd, test_args)
diff --git a/src/antlion/controllers/fuchsia_lib/bt/rfcomm_lib.py b/src/antlion/controllers/fuchsia_lib/bt/rfcomm_lib.py
deleted file mode 100644
index 10f0736..0000000
--- a/src/antlion/controllers/fuchsia_lib/bt/rfcomm_lib.py
+++ /dev/null
@@ -1,113 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.controllers.fuchsia_lib.base_lib import BaseLib
-
-
-class FuchsiaRfcommLib(BaseLib):
-
-    def __init__(self, addr: str) -> None:
-        super().__init__(addr, "rfcomm")
-
-    def init(self):
-        """Initializes the RFCOMM service.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "rfcomm_facade.RfcommInit"
-        test_args = {}
-
-        return self.send_command(test_cmd, test_args)
-
-    def removeService(self):
-        """Removes the RFCOMM service from the Fuchsia device
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "rfcomm_facade.RfcommRemoveService"
-        test_args = {}
-
-        return self.send_command(test_cmd, test_args)
-
-    def disconnectSession(self, peer_id):
-        """Closes the RFCOMM Session with the remote peer
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "rfcomm_facade.DisconnectSession"
-        test_args = {"peer_id": peer_id}
-
-        return self.send_command(test_cmd, test_args)
-
-    def connectRfcommChannel(self, peer_id, server_channel_number):
-        """Makes an outgoing RFCOMM connection to the remote peer
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "rfcomm_facade.ConnectRfcommChannel"
-        test_args = {
-            "peer_id": peer_id,
-            "server_channel_number": server_channel_number
-        }
-
-        return self.send_command(test_cmd, test_args)
-
-    def disconnectRfcommChannel(self, peer_id, server_channel_number):
-        """Closes the RFCOMM channel with the remote peer
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "rfcomm_facade.DisconnectRfcommChannel"
-        test_args = {
-            "peer_id": peer_id,
-            "server_channel_number": server_channel_number
-        }
-
-        return self.send_command(test_cmd, test_args)
-
-    def sendRemoteLineStatus(self, peer_id, server_channel_number):
-        """Sends a Remote Line Status update to the remote peer for the provided channel number
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "rfcomm_facade.SendRemoteLineStatus"
-        test_args = {
-            "peer_id": peer_id,
-            "server_channel_number": server_channel_number
-        }
-
-        return self.send_command(test_cmd, test_args)
-
-    def writeRfcomm(self, peer_id, server_channel_number, data):
-        """Sends data to the remote peer over the RFCOMM channel
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "rfcomm_facade.RfcommWrite"
-        test_args = {
-            "peer_id": peer_id,
-            "server_channel_number": server_channel_number,
-            "data": data
-        }
-
-        return self.send_command(test_cmd, test_args)
diff --git a/src/antlion/controllers/fuchsia_lib/bt/sdp_lib.py b/src/antlion/controllers/fuchsia_lib/bt/sdp_lib.py
deleted file mode 100644
index 25dcc33..0000000
--- a/src/antlion/controllers/fuchsia_lib/bt/sdp_lib.py
+++ /dev/null
@@ -1,136 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.controllers.fuchsia_lib.base_lib import BaseLib
-
-
-class FuchsiaProfileServerLib(BaseLib):
-
-    def __init__(self, addr: str) -> None:
-        super().__init__(addr, "profile_server")
-
-    def addService(self, record):
-        """Publishes an SDP service record specified by input args
-
-        Args:
-            record: A database that represents an SDP record to
-                be published.
-
-        Returns:
-            Dictionary, service id if success, error if error.
-        """
-        test_cmd = "profile_server_facade.ProfileServerAddService"
-        test_args = {
-            "record": record,
-        }
-
-        return self.send_command(test_cmd, test_args)
-
-    def addSearch(self, attribute_list, profile_id):
-        """Publishes services specified by input args
-
-        Args:
-            attribute_list: The list of attributes to set
-            profile_id: The profile ID to set.
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "profile_server_facade.ProfileServerAddSearch"
-        test_args = {
-            "attribute_list": attribute_list,
-            "profile_id": profile_id
-        }
-
-        return self.send_command(test_cmd, test_args)
-
-    def removeService(self, service_id):
-        """Removes a service.
-
-        Args:
-            record: A database that represents an SDP record to
-                be published.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "profile_server_facade.ProfileServerRemoveService"
-        test_args = {
-            "service_id": service_id,
-        }
-
-        return self.send_command(test_cmd, test_args)
-
-    def init(self):
-        """Initializes the ProfileServerFacade's proxy object.
-
-        No operations for SDP can be performed until this is initialized.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "profile_server_facade.ProfileServerInit"
-        test_args = {}
-
-        return self.send_command(test_cmd, test_args)
-
-    def cleanUp(self):
-        """Cleans up all objects related to SDP.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "profile_server_facade.ProfileServerCleanup"
-        test_args = {}
-
-        return self.send_command(test_cmd, test_args)
-
-    def connectL2cap(self, identifier, psm, mode):
-        """ Sends an outgoing l2cap connection to a connected peer device.
-
-        Args:
-            psm: The psm value to connect over. Available PSMs:
-                SDP 0x0001  See Bluetooth Service Discovery Protocol (SDP)
-                RFCOMM  0x0003  See RFCOMM with TS 07.10
-                TCS-BIN 0x0005  See Bluetooth Telephony Control Specification /
-                    TCS Binary
-                TCS-BIN-CORDLESS    0x0007  See Bluetooth Telephony Control
-                    Specification / TCS Binary
-                BNEP    0x000F  See Bluetooth Network Encapsulation Protocol
-                HID_Control 0x0011  See Human Interface Device
-                HID_Interrupt   0x0013  See Human Interface Device
-                UPnP    0x0015  See [ESDP]
-                AVCTP   0x0017  See Audio/Video Control Transport Protocol
-                AVDTP   0x0019  See Audio/Video Distribution Transport Protocol
-                AVCTP_Browsing  0x001B  See Audio/Video Remote Control Profile
-                UDI_C-Plane 0x001D  See the Unrestricted Digital Information
-                    Profile [UDI]
-                ATT 0x001F  See Bluetooth Core Specification​
-                ​3DSP   0x0021​ ​​See 3D Synchronization Profile.
-                ​LE_PSM_IPSP    ​0x0023 ​See Internet Protocol Support Profile
-                    (IPSP)
-                OTS 0x0025  See Object Transfer Service (OTS)
-                EATT    0x0027  See Bluetooth Core Specification
-            mode: String - The channel mode to connect to. Available values:
-                Basic mode: BASIC
-                Enhanced Retransmission mode: ERTM
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "profile_server_facade.ProfileServerConnectL2cap"
-        test_args = {"identifier": identifier, "psm": psm, "mode": mode}
-
-        return self.send_command(test_cmd, test_args)
diff --git a/src/antlion/controllers/fuchsia_lib/device_lib.py b/src/antlion/controllers/fuchsia_lib/device_lib.py
new file mode 100644
index 0000000..f7ad6b6
--- /dev/null
+++ b/src/antlion/controllers/fuchsia_lib/device_lib.py
@@ -0,0 +1,37 @@
+#!/usr/bin/env python3
+#
+# Copyright 2023 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from antlion.controllers.fuchsia_lib.base_lib import BaseLib
+
+
+class DeviceLib(BaseLib):
+    def __init__(self, addr: str) -> None:
+        super().__init__(addr, "device")
+
+    def get_device_name(self) -> str:
+        """Get the device name."""
+
+        return self.send_command("device_facade.GetDeviceName", {})
+
+    def get_product_name(self) -> str:
+        """Get the product name."""
+
+        return self.send_command("device_facade.GetProduct", {})
+
+    def get_version(self):
+        """Get the device version."""
+
+        return self.send_command("device_facade.GetVersion", {})
diff --git a/src/antlion/controllers/fuchsia_lib/ffx.py b/src/antlion/controllers/fuchsia_lib/ffx.py
index ca05feb..3db6c8d 100644
--- a/src/antlion/controllers/fuchsia_lib/ffx.py
+++ b/src/antlion/controllers/fuchsia_lib/ffx.py
@@ -35,11 +35,10 @@
 class FFXError(signals.TestError):
     """Non-zero error code returned from a ffx command."""
 
-    def __init__(self, command: str,
-                 process: subprocess.CalledProcessError) -> None:
+    def __init__(self, command: str, process: subprocess.CalledProcessError) -> None:
         self.command = command
-        self.stdout: str = process.stdout.decode('utf-8', errors='replace')
-        self.stderr: str = process.stderr.decode('utf-8', errors='replace')
+        self.stdout: str = process.stdout.decode("utf-8", errors="replace")
+        self.stderr: str = process.stderr.decode("utf-8", errors="replace")
         self.exit_status = process.returncode
 
     def __str__(self) -> str:
@@ -61,11 +60,13 @@
         ssh_private_key_path: Path to Fuchsia DUT SSH private key.
     """
 
-    def __init__(self,
-                 binary_path: str,
-                 mdns_name: str,
-                 ip: str = None,
-                 ssh_private_key_path: str = None):
+    def __init__(
+        self,
+        binary_path: str,
+        mdns_name: str,
+        ip: str = None,
+        ssh_private_key_path: str = None,
+    ):
         """
         Args:
             binary_path: Path to ffx binary.
@@ -103,12 +104,13 @@
         self._has_been_reachable = False
         self._has_logged_version = False
 
-    def run(self,
-            command: str,
-            timeout_sec: int = FFX_DEFAULT_COMMAND_TIMEOUT,
-            skip_status_code_check: bool = False,
-            skip_reachability_check: bool = False
-            ) -> subprocess.CompletedProcess:
+    def run(
+        self,
+        command: str,
+        timeout_sec: int = FFX_DEFAULT_COMMAND_TIMEOUT,
+        skip_status_code_check: bool = False,
+        skip_reachability_check: bool = False,
+    ) -> subprocess.CompletedProcess:
         """Runs an ffx command.
 
         Verifies reachability before running, if it hasn't already.
@@ -135,13 +137,15 @@
             self.verify_reachable()
 
         self.log.debug(f'Running "{command}".')
-        full_command = f'{self.binary_path} -e {self._env_config_path} {command}'
+        full_command = f"{self.binary_path} -e {self._env_config_path} {command}"
 
         try:
-            result = subprocess.run(full_command.split(),
-                                    capture_output=True,
-                                    timeout=timeout_sec,
-                                    check=not skip_status_code_check)
+            result = subprocess.run(
+                full_command.split(),
+                capture_output=True,
+                timeout=timeout_sec,
+                check=not skip_status_code_check,
+            )
         except subprocess.CalledProcessError as e:
             raise FFXError(command, e) from e
         except subprocess.TimeoutExpired as e:
@@ -150,7 +154,7 @@
         return result
 
     def _create_isolated_environment(self) -> None:
-        """ Create a new isolated environment for ffx.
+        """Create a new isolated environment for ffx.
 
         This is needed to avoid overlapping ffx daemons while testing in
         parallel, causing the ffx invocations to “upgrade” one daemon to
@@ -163,8 +167,9 @@
         root_dir = context.get_current_context().get_full_output_path()
         epoch = utils.get_current_epoch_time()
         time_stamp = logger.normalize_log_line_timestamp(
-            logger.epoch_to_log_line_timestamp(epoch))
-        target_dir = os.path.join(root_dir, f'{self.mdns_name}_{time_stamp}')
+            logger.epoch_to_log_line_timestamp(epoch)
+        )
+        target_dir = os.path.join(root_dir, f"{self.mdns_name}_{time_stamp}")
         os.makedirs(target_dir, exist_ok=True)
 
         # Sockets need to be created in a different directory to be guaranteed
@@ -173,10 +178,8 @@
         self._sock_dir = tempfile.mkdtemp()
         # On MacOS, the socket paths need to be just paths (not pre-created
         # Python tempfiles, which are not socket files).
-        self._ssh_auth_sock_path = str(
-            PurePath(self._sock_dir, 'ssh_auth_sock'))
-        self._overnet_socket_path = str(
-            PurePath(self._sock_dir, 'overnet_socket'))
+        self._ssh_auth_sock_path = str(PurePath(self._sock_dir, "ssh_auth_sock"))
+        self._overnet_socket_path = str(PurePath(self._sock_dir, "overnet_socket"))
 
         config: MutableMapping[str, Any] = {
             "target": {
@@ -205,6 +208,14 @@
                     "disabled": True,
                 },
             },
+            # Prevent log collection from all devices the ffx daemon sees; only
+            # collect logs from the target device.
+            #
+            # TODO(https://fxbug.dev/118764): Consider re-enabling after
+            # resolution of the issue causing a reboot of the target device.
+            "proactive_log": {
+                "enabled": False,
+            },
         }
 
         if self.ip:
@@ -220,7 +231,7 @@
             config["ssh"]["priv"] = self.ssh_private_key_path
 
         config_path = os.path.join(target_dir, "ffx_config.json")
-        with open(config_path, 'w', encoding="utf-8") as f:
+        with open(config_path, "w", encoding="utf-8") as f:
             json.dump(config, f, ensure_ascii=False, indent=4)
 
         env = {
@@ -229,15 +240,13 @@
             "global": None,
         }
         self._env_config_path = os.path.join(target_dir, "ffx_env.json")
-        with open(self._env_config_path, 'w', encoding="utf-8") as f:
+        with open(self._env_config_path, "w", encoding="utf-8") as f:
             json.dump(env, f, ensure_ascii=False, indent=4)
 
         # The ffx daemon will started automatically when needed. There is no
         # need to start it manually here.
 
-    def verify_reachable(self,
-                         timeout_sec: int = FFX_DEFAULT_COMMAND_TIMEOUT
-                         ) -> None:
+    def verify_reachable(self, timeout_sec: int = FFX_DEFAULT_COMMAND_TIMEOUT) -> None:
         """Verify the target is reachable via RCS and various services.
 
         Blocks until the device allows for an RCS connection. If the device
@@ -271,7 +280,7 @@
                 self.run(cmd, timeout_sec=5, skip_reachability_check=True)
                 break
             except FFXError as e:
-                if 'took too long connecting to ascendd socket' in e.stderr:
+                if "took too long connecting to ascendd socket" in e.stderr:
                     err = e
                 else:
                     raise e
@@ -280,20 +289,21 @@
 
             if time.perf_counter() > timeout:
                 raise FFXTimeout(
-                    f'Waited over {timeout_sec}s for ffx to become reachable'
+                    f"Waited over {timeout_sec}s for ffx to become reachable"
                 ) from err
 
         # Use a shorter timeout than default because device information
         # gathering can hang for a long time if the device is not actually
         # connectable.
         try:
-            result = self.run("target show --json",
-                              timeout_sec=15,
-                              skip_reachability_check=True)
+            result = self.run(
+                "target show --json", timeout_sec=15, skip_reachability_check=True
+            )
         except Exception as e:
             self.log.error(
                 f'Failed to reach target device. Try running "{self.binary_path}'
-                + ' doctor" to diagnose issues.')
+                + ' doctor" to diagnose issues.'
+            )
             raise e
 
         self._has_been_reachable = True
@@ -302,8 +312,7 @@
             self._has_logged_version = True
             self.compare_version(result)
 
-    def compare_version(
-            self, target_show_result: subprocess.CompletedProcess) -> None:
+    def compare_version(self, target_show_result: subprocess.CompletedProcess) -> None:
         """Compares the version of Fuchsia with the version of ffx.
 
         Args:
@@ -311,17 +320,17 @@
                 output mode enabled
         """
         result_json = json.loads(target_show_result.stdout)
-        build_info = next(
-            filter(lambda s: s.get('label') == 'build', result_json))
+        build_info = next(filter(lambda s: s.get("label") == "build", result_json))
         version_info = next(
-            filter(lambda s: s.get('label') == 'version', build_info['child']))
-        device_version = version_info.get('value')
-        ffx_version = self.run("version").stdout.decode('utf-8')
+            filter(lambda s: s.get("label") == "version", build_info["child"])
+        )
+        device_version = version_info.get("value")
+        ffx_version = self.run("version").stdout.decode("utf-8")
 
-        self.log.info(
-            f"Device version: {device_version}, ffx version: {ffx_version}")
+        self.log.info(f"Device version: {device_version}, ffx version: {ffx_version}")
         if device_version != ffx_version:
             self.log.warning(
-                "ffx versions that differ from device versions may" +
-                " have compatibility issues. It is recommended to" +
-                " use versions within 6 weeks of each other.")
+                "ffx versions that differ from device versions may"
+                + " have compatibility issues. It is recommended to"
+                + " use versions within 6 weeks of each other."
+            )
diff --git a/src/antlion/controllers/fuchsia_lib/hardware_power_statecontrol_lib.py b/src/antlion/controllers/fuchsia_lib/hardware_power_statecontrol_lib.py
index d6e030e..30af9a8 100644
--- a/src/antlion/controllers/fuchsia_lib/hardware_power_statecontrol_lib.py
+++ b/src/antlion/controllers/fuchsia_lib/hardware_power_statecontrol_lib.py
@@ -23,18 +23,19 @@
 
 
 class FuchsiaHardwarePowerStatecontrolLib(base_lib.BaseLib):
-
     def __init__(self, addr: str) -> None:
         super().__init__(addr, "hardware_power_statecontrol")
 
     def send_command(self, test_cmd, test_args, response_timeout=30):
         """Wrap send_command to allow disconnects after sending the request."""
         try:
-            response = super().send_command(test_cmd, test_args,
-                                            response_timeout)
-        except (TimeoutError, http.client.RemoteDisconnected,
-                base_lib.DeviceOffline) as e:
-            logging.warn(f'Error while sending power command: {e}')
+            response = super().send_command(test_cmd, test_args, response_timeout)
+        except (
+            TimeoutError,
+            http.client.RemoteDisconnected,
+            base_lib.DeviceOffline,
+        ) as e:
+            logging.warning(f"Error while sending power command: {e}")
             return
         return response
 
diff --git a/src/antlion/controllers/fuchsia_lib/lib_controllers/netstack_controller.py b/src/antlion/controllers/fuchsia_lib/lib_controllers/netstack_controller.py
index 9910be0..0ff858c 100644
--- a/src/antlion/controllers/fuchsia_lib/lib_controllers/netstack_controller.py
+++ b/src/antlion/controllers/fuchsia_lib/lib_controllers/netstack_controller.py
@@ -28,7 +28,8 @@
     def __init__(self, fuchsia_device):
         self.device = fuchsia_device
         self.log = logger.create_tagged_trace_logger(
-            'NetstackController for FuchsiaDevice | %s' % self.device.ip)
+            "NetstackController for FuchsiaDevice | %s" % self.device.ip
+        )
 
     def list_interfaces(self):
         """Retrieve netstack interfaces from netstack facade
@@ -38,8 +39,8 @@
             information
         """
         response = self.device.sl4f.netstack_lib.netstackListInterfaces()
-        if response.get('error'):
+        if response.get("error"):
             raise NetstackControllerError(
-                'Failed to get network interfaces list: %s' %
-                response['error'])
-        return response['result']
+                "Failed to get network interfaces list: %s" % response["error"]
+            )
+        return response["result"]
diff --git a/src/antlion/controllers/fuchsia_lib/lib_controllers/wlan_controller.py b/src/antlion/controllers/fuchsia_lib/lib_controllers/wlan_controller.py
index 176d54e..922b167 100644
--- a/src/antlion/controllers/fuchsia_lib/lib_controllers/wlan_controller.py
+++ b/src/antlion/controllers/fuchsia_lib/lib_controllers/wlan_controller.py
@@ -34,7 +34,8 @@
     def __init__(self, fuchsia_device):
         self.device = fuchsia_device
         self.log = logger.create_tagged_trace_logger(
-            'WlanController for FuchsiaDevice | %s' % self.device.ip)
+            "WlanController for FuchsiaDevice | %s" % self.device.ip
+        )
 
     # TODO(70501): Wrap wlan_lib functions and setup from FuchsiaDevice here
     # (similar to how WlanPolicyController does it) to prevent FuchsiaDevice
@@ -46,25 +47,26 @@
         pass
 
     def update_wlan_interfaces(self):
-        """ Retrieves WLAN interfaces from device and sets the FuchsiaDevice
+        """Retrieves WLAN interfaces from device and sets the FuchsiaDevice
         attributes.
         """
         wlan_interfaces = self.get_interfaces_by_role()
-        self.device.wlan_client_interfaces = wlan_interfaces['client']
-        self.device.wlan_ap_interfaces = wlan_interfaces['ap']
+        self.device.wlan_client_interfaces = wlan_interfaces["client"]
+        self.device.wlan_ap_interfaces = wlan_interfaces["ap"]
 
         # Set test interfaces to value from config, else the first found
         # interface, else None
         self.device.wlan_client_test_interface_name = self.device.conf_data.get(
-            'wlan_client_test_interface',
-            next(iter(self.device.wlan_client_interfaces), None))
+            "wlan_client_test_interface",
+            next(iter(self.device.wlan_client_interfaces), None),
+        )
 
         self.device.wlan_ap_test_interface_name = self.device.conf_data.get(
-            'wlan_ap_test_interface',
-            next(iter(self.device.wlan_ap_interfaces), None))
+            "wlan_ap_test_interface", next(iter(self.device.wlan_ap_interfaces), None)
+        )
 
     def get_interfaces_by_role(self):
-        """ Retrieves WLAN interface information, supplimented by netstack info.
+        """Retrieves WLAN interface information, supplemented by netstack info.
 
         Returns:
             Dict with keys 'client' and 'ap', each of which contain WLAN
@@ -73,32 +75,32 @@
 
         # Retrieve WLAN interface IDs
         response = self.device.sl4f.wlan_lib.wlanGetIfaceIdList()
-        if response.get('error'):
-            raise WlanControllerError('Failed to get WLAN iface ids: %s' %
-                                      response['error'])
+        if response.get("error"):
+            raise WlanControllerError(
+                "Failed to get WLAN iface ids: %s" % response["error"]
+            )
 
-        wlan_iface_ids = response.get('result', [])
+        wlan_iface_ids = response.get("result", [])
         if len(wlan_iface_ids) < 1:
-            return {'client': {}, 'ap': {}}
+            return {"client": {}, "ap": {}}
 
         # Use IDs to get WLAN interface info and mac addresses
         wlan_ifaces_by_mac = {}
         for id in wlan_iface_ids:
             response = self.device.sl4f.wlan_lib.wlanQueryInterface(id)
-            if response.get('error'):
+            if response.get("error"):
                 raise WlanControllerError(
-                    'Failed to query wlan iface id %s: %s' %
-                    (id, response['error']))
+                    "Failed to query wlan iface id %s: %s" % (id, response["error"])
+                )
 
-            mac = response['result'].get('sta_addr', None)
+            mac = response["result"].get("sta_addr", None)
             if mac is None:
                 # Fallback to older field name to maintain backwards
                 # compatibility with older versions of SL4F's
                 # QueryIfaceResponse. See https://fxrev.dev/562146.
-                mac = response['result'].get('mac_addr')
+                mac = response["result"].get("mac_addr")
 
-            wlan_ifaces_by_mac[utils.mac_address_list_to_str(
-                mac)] = response['result']
+            wlan_ifaces_by_mac[utils.mac_address_list_to_str(mac)] = response["result"]
 
         # Use mac addresses to query the interfaces from the netstack view,
         # which allows us to supplement the interface information with the name,
@@ -109,21 +111,21 @@
         # to reconcile some of the information between the two perspectives, at
         # which point we can eliminate step.
         net_ifaces = self.device.netstack_controller.list_interfaces()
-        wlan_ifaces_by_role = {'client': {}, 'ap': {}}
+        wlan_ifaces_by_role = {"client": {}, "ap": {}}
         for iface in net_ifaces:
             try:
                 # Some interfaces might not have a MAC
-                iface_mac = utils.mac_address_list_to_str(iface['mac'])
+                iface_mac = utils.mac_address_list_to_str(iface["mac"])
             except Exception as e:
-                self.log.debug(f'Error {e} getting MAC for iface {iface}')
+                self.log.debug(f"Error {e} getting MAC for iface {iface}")
                 continue
             if iface_mac in wlan_ifaces_by_mac:
-                wlan_ifaces_by_mac[iface_mac]['netstack_id'] = iface['id']
+                wlan_ifaces_by_mac[iface_mac]["netstack_id"] = iface["id"]
 
                 # Add to return dict, mapped by role then name.
-                wlan_ifaces_by_role[
-                    wlan_ifaces_by_mac[iface_mac]['role'].lower()][
-                        iface['name']] = wlan_ifaces_by_mac[iface_mac]
+                wlan_ifaces_by_role[wlan_ifaces_by_mac[iface_mac]["role"].lower()][
+                    iface["name"]
+                ] = wlan_ifaces_by_mac[iface_mac]
 
         return wlan_ifaces_by_role
 
@@ -138,45 +140,50 @@
             EnvironmentError - failure to get/set regulatory region
             ConnectionError - failure to query PHYs
         """
-        self.log.info('Setting DUT country code to %s' % country_code)
+        self.log.info("Setting DUT country code to %s" % country_code)
         country_code_response = self.device.sl4f.regulatory_region_lib.setRegion(
-            country_code)
-        if country_code_response.get('error'):
+            country_code
+        )
+        if country_code_response.get("error"):
             raise EnvironmentError(
-                'Failed to set country code (%s) on DUT. Error: %s' %
-                (country_code, country_code_response['error']))
+                "Failed to set country code (%s) on DUT. Error: %s"
+                % (country_code, country_code_response["error"])
+            )
 
-        self.log.info('Verifying DUT country code was correctly set to %s.' %
-                      country_code)
+        self.log.info(
+            "Verifying DUT country code was correctly set to %s." % country_code
+        )
         phy_ids_response = self.device.sl4f.wlan_lib.wlanPhyIdList()
-        if phy_ids_response.get('error'):
-            raise ConnectionError('Failed to get phy ids from DUT. Error: %s' %
-                                  (country_code, phy_ids_response['error']))
+        if phy_ids_response.get("error"):
+            raise ConnectionError(
+                "Failed to get phy ids from DUT. Error: %s"
+                % phy_ids_response["error"]
+            )
 
         end_time = time.time() + TIME_TO_WAIT_FOR_COUNTRY_CODE
         while time.time() < end_time:
-            for id in phy_ids_response['result']:
-                get_country_response = self.device.sl4f.wlan_lib.wlanGetCountry(
-                    id)
-                if get_country_response.get('error'):
+            for id in phy_ids_response["result"]:
+                get_country_response = self.device.sl4f.wlan_lib.wlanGetCountry(id)
+                if get_country_response.get("error"):
                     raise ConnectionError(
-                        'Failed to query PHY ID (%s) for country. Error: %s' %
-                        (id, get_country_response['error']))
+                        "Failed to query PHY ID (%s) for country. Error: %s"
+                        % (id, get_country_response["error"])
+                    )
 
-                set_code = ''.join([
-                    chr(ascii_char)
-                    for ascii_char in get_country_response['result']
-                ])
+                set_code = "".join(
+                    [chr(ascii_char) for ascii_char in get_country_response["result"]]
+                )
                 if set_code != country_code:
                     self.log.debug(
-                        'PHY (id: %s) has incorrect country code set. '
-                        'Expected: %s, Got: %s' % (id, country_code, set_code))
+                        "PHY (id: %s) has incorrect country code set. "
+                        "Expected: %s, Got: %s" % (id, country_code, set_code)
+                    )
                     break
             else:
-                self.log.info('All PHYs have expected country code (%s)' %
-                              country_code)
+                self.log.info("All PHYs have expected country code (%s)" % country_code)
                 break
             time.sleep(TIME_TO_SLEEP_BETWEEN_RETRIES)
         else:
-            raise EnvironmentError('Failed to set DUT country code to %s.' %
-                                   country_code)
+            raise EnvironmentError(
+                "Failed to set DUT country code to %s." % country_code
+            )
diff --git a/src/antlion/controllers/fuchsia_lib/lib_controllers/wlan_policy_controller.py b/src/antlion/controllers/fuchsia_lib/lib_controllers/wlan_policy_controller.py
index 25f06b4..5ef126b 100644
--- a/src/antlion/controllers/fuchsia_lib/lib_controllers/wlan_policy_controller.py
+++ b/src/antlion/controllers/fuchsia_lib/lib_controllers/wlan_policy_controller.py
@@ -14,26 +14,27 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import subprocess
 import time
 
 from antlion import logger
 from antlion import signals
 
-from antlion.controllers.fuchsia_lib.ffx import FFX, FFXError, FFXTimeout
 from antlion.controllers.fuchsia_lib.sl4f import SL4F
+from antlion.controllers.fuchsia_lib.ssh import SSHProvider
 
 SAVED_NETWORKS = "saved_networks"
 CLIENT_STATE = "client_connections_state"
 CONNECTIONS_ENABLED = "ConnectionsEnabled"
 CONNECTIONS_DISABLED = "ConnectionsDisabled"
 
-STATE_CONNECTED = 'Connected'
-STATE_CONNECTING = 'Connecting'
-STATE_DISCONNECTED = 'Disconnected'
-STATE_CONNECTION_STOPPED = 'ConnectionStopped'
+STATE_CONNECTED = "Connected"
+STATE_CONNECTING = "Connecting"
+STATE_DISCONNECTED = "Disconnected"
+STATE_CONNECTION_STOPPED = "ConnectionStopped"
 
+SESSION_MANAGER_TIMEOUT_SEC = 10
 FUCHSIA_DEFAULT_WLAN_CONFIGURE_TIMEOUT = 30
+DEFAULT_GET_UPDATE_TIMEOUT = 60
 
 
 class WlanPolicyControllerError(signals.ControllerError):
@@ -43,104 +44,91 @@
 class WlanPolicyController:
     """Contains methods related to the wlan policy layer, to be used in the
     FuchsiaDevice object.
+
+    Attributes:
+        sl4f: sl4f module for communicating to the WLAN policy controller.
+        ssh: transport to fuchsia device to stop component processes.
     """
 
-    def __init__(self, sl4f: SL4F, ffx: FFX):
-        self.client_controller = False
+    def __init__(self, sl4f: SL4F, ssh: SSHProvider):
+        """
+        Args:
+            sl4f: sl4f module for communicating to the WLAN policy controller.
+            ssh: transport to fuchsia device to stop component processes.
+        """
         self.preserved_networks_and_client_state = None
         self.policy_configured = False
         self.sl4f = sl4f
-        self.ffx = ffx
+        self.ssh = ssh
         self.log = logger.create_tagged_trace_logger(
-            f'WlanPolicyController | {ffx.ip}')
+            f"WlanPolicyController | {self.ssh.config.host_name}"
+        )
 
-    # TODO(b/231252355): Lower default timeout to 15s once ffx becomes more
-    # performant and/or reliable.
     def configure_wlan(
-            self,
-            preserve_saved_networks: bool,
-            timeout_sec: int = FUCHSIA_DEFAULT_WLAN_CONFIGURE_TIMEOUT) -> None:
+        self,
+        preserve_saved_networks: bool,
+        timeout_sec: int = FUCHSIA_DEFAULT_WLAN_CONFIGURE_TIMEOUT,
+    ) -> None:
         """Sets up wlan policy layer.
 
         Args:
             preserve_saved_networks: whether to clear existing saved
                 networks and client state, to be restored at test close.
-            timeout: time to wait for device to configure WLAN.
+            timeout_sec: time to wait for device to configure WLAN.
         """
-        end_time_sec = time.time() + timeout_sec
 
-        # Kill basemgr (Component v1 version of session manager)
-        while time.time() < end_time_sec:
-            response = self.sl4f.basemgr_lib.killBasemgr()
-            if not response.get('error'):
-                self.log.debug('Basemgr kill call successfully issued.')
-                break
-            self.log.debug(response['error'])
-            time.sleep(1)
-        else:
-            raise WlanPolicyControllerError(
-                'Failed to issue successful basemgr kill call.')
-
-        # Stop the session manager, which also holds the Policy controller.
-        try:
-            result = self.ffx.run(
-                'component destroy /core/session-manager/session:session',
-                skip_status_code_check=True)
-
-            if result.returncode == 0:
-                self.log.debug(f"Stopped session: {result.stdout}.")
-            else:
-                if (b'InstanceNotFound' in result.stderr
-                        or b'instance was not found' in result.stderr
-                        or b'does not exist' in result.stderr):
-                    self.log.debug(f'Instance was not found: {result.stderr}.')
-                else:
-                    raise WlanPolicyControllerError(
-                        f'Failed to stop the session: {result.stderr}.')
-        except FFXTimeout or FFXError as e:
-            raise WlanPolicyControllerError from e
+        # We need to stop session manager to free control of
+        # fuchsia.wlan.policy.ClientController, which can only be used by a
+        # single caller at a time. SL4F needs the ClientController to trigger
+        # WLAN policy state changes. On eng builds the session_manager can be
+        # restarted after being stopped during reboot so we attempt killing the
+        # session manager process for 10 seconds.
+        # See https://cs.opensource.google/fuchsia/fuchsia/+/main:sdk/fidl/fuchsia.wlan.policy/client_provider.fidl
+        if "cast_agent.cm" in self.ssh.run("ps").stdout:
+            end_time_session_manager_sec = time.time() + SESSION_MANAGER_TIMEOUT_SEC
+            while time.time() < end_time_session_manager_sec:
+                self.ssh.stop_component("session_manager", is_cfv2_component=True)
 
         # Acquire control of policy layer
+        end_time_config_sec = time.time() + timeout_sec
         controller_errors = []
-        while time.time() < end_time_sec:
+        while time.time() < end_time_config_sec:
             # Create a client controller
             response = self.sl4f.wlan_policy_lib.wlanCreateClientController()
-            if response.get('error'):
-                controller_errors.append(response['error'])
-                self.log.debug(response['error'])
-                time.sleep(1)
-                continue
-            # Attempt to use the client controller (failure indicates a closed
-            # channel, meaning the client controller was rejected.
-            response = self.sl4f.wlan_policy_lib.wlanGetSavedNetworks()
-            if response.get('error'):
-                controller_errors.append(response['error'])
-                self.log.debug(response['error'])
+            if response.get("error"):
+                controller_errors.append(response["error"])
+                self.log.debug(response["error"])
                 time.sleep(1)
                 continue
             break
         else:
             self.log.warning(
                 "Failed to create and use a WLAN policy client controller. Errors: ["
-                + "; ".join(controller_errors) + "]")
+                + "; ".join(controller_errors)
+                + "]"
+            )
             raise WlanPolicyControllerError(
-                'Failed to create and use a WLAN policy client controller.')
+                "Failed to create and use a WLAN policy client controller."
+            )
 
-        self.log.info('ACTS tests now have control of the WLAN policy layer.')
+        self.log.info("ACTS tests now have control of the WLAN policy layer.")
 
         if preserve_saved_networks and not self.preserved_networks_and_client_state:
-            self.preserved_networks_and_client_state = self.remove_and_preserve_networks_and_client_state(
+            self.preserved_networks_and_client_state = (
+                self.remove_and_preserve_networks_and_client_state()
             )
         if not self.start_client_connections():
             raise WlanPolicyControllerError(
-                'Failed to start client connections during configuration.')
+                "Failed to start client connections during configuration."
+            )
 
         self.policy_configured = True
 
     def _deconfigure_wlan(self):
         if not self.stop_client_connections():
             raise WlanPolicyControllerError(
-                'Failed to stop client connections during deconfiguration.')
+                "Failed to stop client connections during deconfiguration."
+            )
         self.policy_configured = False
 
     def clean_up(self) -> None:
@@ -159,9 +147,10 @@
         Returns:
             True, if successful. False otherwise."""
         start_response = self.sl4f.wlan_policy_lib.wlanStartClientConnections()
-        if start_response.get('error'):
-            self.log.error('Failed to start client connections. Err: %s' %
-                           start_response['error'])
+        if start_response.get("error"):
+            self.log.error(
+                "Failed to start client connections. Err: %s" % start_response["error"]
+            )
             return False
         return True
 
@@ -172,14 +161,15 @@
         Returns:
             True, if successful. False otherwise."""
         stop_response = self.sl4f.wlan_policy_lib.wlanStopClientConnections()
-        if stop_response.get('error'):
-            self.log.error('Failed to stop client connections. Err: %s' %
-                           stop_response['error'])
+        if stop_response.get("error"):
+            self.log.error(
+                "Failed to stop client connections. Err: %s" % stop_response["error"]
+            )
             return False
         return True
 
     def save_and_connect(self, ssid, security, password=None, timeout=30):
-        """ Saves and connects to the network. This is the policy version of
+        """Saves and connects to the network. This is the policy version of
         connect and check_connect_response because the policy layer
         requires a saved network and the policy connect does not return
         success or failure
@@ -202,11 +192,7 @@
             return False
         return self.wait_for_connect(ssid, security, timeout=timeout)
 
-    def save_and_wait_for_autoconnect(self,
-                                      ssid,
-                                      security,
-                                      password=None,
-                                      timeout=30):
+    def save_and_wait_for_autoconnect(self, ssid, security, password=None, timeout=30):
         """Saves a network and waits, expecting an autoconnection to the newly
         saved network. This differes from save_and_connect, as it doesn't
         expressly trigger a connection first. There are cases in which an
@@ -227,13 +213,9 @@
             return False
         return self.wait_for_connect(ssid, security, timeout=timeout)
 
-    def remove_and_wait_for_disconnect(self,
-                                       ssid,
-                                       security_type,
-                                       password=None,
-                                       state=None,
-                                       status=None,
-                                       timeout=30):
+    def remove_and_wait_for_disconnect(
+        self, ssid, security_type, password=None, state=None, status=None, timeout=30
+    ):
         """Removes a single network and waits for a disconnect. It is not
         guaranteed the device will stay disconnected, as it may autoconnect
         to a different saved network.
@@ -254,25 +236,30 @@
         self.sl4f.wlan_policy_lib.wlanSetNewListener()
         if not self.remove_network(ssid, security_type, password=password):
             return False
-        return self.wait_for_disconnect(ssid,
-                                        security_type,
-                                        state=state,
-                                        status=status,
-                                        timeout=timeout)
+        return self.wait_for_disconnect(
+            ssid, security_type, state=state, status=status, timeout=timeout
+        )
 
-    def remove_all_networks_and_wait_for_no_connections(self, timeout=30):
+    def remove_all_networks_and_wait_for_no_connections(
+        self, timeout_sec: int = DEFAULT_GET_UPDATE_TIMEOUT
+    ) -> bool:
         """Removes all networks and waits until device is not connected to any
         networks. This should be used as the policy version of disconnect.
 
+        Args:
+            timeout_sec: The time to wait to see no connections.
+
         Returns:
             True, if successful. False otherwise.
         """
         self.sl4f.wlan_policy_lib.wlanSetNewListener()
         if not self.remove_all_networks():
-            self.log.error('Failed to remove all networks. Cannot continue to '
-                           'wait_for_no_connections.')
+            self.log.error(
+                "Failed to remove all networks. Cannot continue to "
+                "wait_for_no_connections."
+            )
             return False
-        return self.wait_for_no_connections(timeout=timeout)
+        return self.wait_for_no_connections(timeout_sec=timeout_sec)
 
     def save_network(self, ssid, security_type, password=None):
         """Save a network via the policy layer.
@@ -286,10 +273,13 @@
             True, if successful. False otherwise.
         """
         save_response = self.sl4f.wlan_policy_lib.wlanSaveNetwork(
-            ssid, security_type, target_pwd=password)
-        if save_response.get('error'):
-            self.log.error('Failed to save network %s with error: %s' %
-                           (ssid, save_response['error']))
+            ssid, security_type, target_pwd=password
+        )
+        if save_response.get("error"):
+            self.log.error(
+                "Failed to save network %s with error: %s"
+                % (ssid, save_response["error"])
+            )
             return False
         return True
 
@@ -305,10 +295,13 @@
             True, if successful. False otherwise.
         """
         remove_response = self.sl4f.wlan_policy_lib.wlanRemoveNetwork(
-            ssid, security_type, target_pwd=password)
-        if remove_response.get('error'):
-            self.log.error('Failed to remove network %s with error: %s' %
-                           (ssid, remove_response['error']))
+            ssid, security_type, target_pwd=password
+        )
+        if remove_response.get("error"):
+            self.log.error(
+                "Failed to remove network %s with error: %s"
+                % (ssid, remove_response["error"])
+            )
             return False
         return True
 
@@ -319,9 +312,11 @@
             True, if successful. False otherwise.
         """
         remove_all_response = self.sl4f.wlan_policy_lib.wlanRemoveAllNetworks()
-        if remove_all_response.get('error'):
-            self.log.error('Error occurred removing all networks: %s' %
-                           remove_all_response['error'])
+        if remove_all_response.get("error"):
+            self.log.error(
+                "Error occurred removing all networks: %s"
+                % remove_all_response["error"]
+            )
             return False
         return True
 
@@ -334,13 +329,13 @@
         Raises:
             WlanPolicyControllerError, if retrieval fails.
         """
-        saved_networks_response = self.sl4f.wlan_policy_lib.wlanGetSavedNetworks(
-        )
-        if saved_networks_response.get('error'):
+        saved_networks_response = self.sl4f.wlan_policy_lib.wlanGetSavedNetworks()
+        if saved_networks_response.get("error"):
             raise WlanPolicyControllerError(
-                'Failed to retrieve saved networks: %s' %
-                saved_networks_response['error'])
-        return saved_networks_response['result']
+                "Failed to retrieve saved networks: %s"
+                % saved_networks_response["error"]
+            )
+        return saved_networks_response["result"]
 
     def send_connect_command(self, ssid, security_type):
         """Sends a connect command to a network that is already saved. This does
@@ -355,17 +350,17 @@
         Returns:
             True, if command send successfully. False otherwise.
         """
-        connect_response = self.sl4f.wlan_policy_lib.wlanConnect(
-            ssid, security_type)
-        if connect_response.get('error'):
+        connect_response = self.sl4f.wlan_policy_lib.wlanConnect(ssid, security_type)
+        if connect_response.get("error"):
             self.log.error(
-                'Error occurred when sending policy connect command: %s' %
-                connect_response['error'])
+                "Error occurred when sending policy connect command: %s"
+                % connect_response["error"]
+            )
             return False
         return True
 
     def wait_for_connect(self, ssid, security_type, timeout=30):
-        """ Wait until the device has connected to the specified network.
+        """Wait until the device has connected to the specified network.
         Args:
             ssid: string, the network name
             security: string, security type of network (see sl4f.wlan_policy_lib)
@@ -380,45 +375,47 @@
             time_left = max(1, int(end_time - time.time()))
 
             try:
-                update = self.sl4f.wlan_policy_lib.wlanGetUpdate(
-                    timeout=time_left)
+                update = self.sl4f.wlan_policy_lib.wlanGetUpdate(timeout=time_left)
             except TimeoutError:
-                self.log.error('Timed out waiting for response from device '
-                               'while waiting for network with SSID "%s" to '
-                               'connect. Device took too long to connect or '
-                               'the request timed out for another reason.' %
-                               ssid)
+                self.log.error(
+                    "Timed out waiting for response from device "
+                    'while waiting for network with SSID "%s" to '
+                    "connect. Device took too long to connect or "
+                    "the request timed out for another reason." % ssid
+                )
                 self.sl4f.wlan_policy_lib.wlanSetNewListener()
                 return False
-            if update.get('error'):
+            if update.get("error"):
                 # This can occur for many reasons, so it is not necessarily a
                 # failure.
-                self.log.debug('Error occurred getting status update: %s' %
-                               update['error'])
+                self.log.debug(
+                    "Error occurred getting status update: %s" % update["error"]
+                )
                 continue
 
-            for network in update['result']['networks']:
-                if network['id']['ssid'] == ssid or network['id'][
-                        'type_'].lower() == security_type.lower():
-                    if 'state' not in network:
+            for network in update["result"]["networks"]:
+                if (
+                    network["id"]["ssid"] == ssid
+                    or network["id"]["type_"].lower() == security_type.lower()
+                ):
+                    if "state" not in network:
                         raise WlanPolicyControllerError(
-                            'WLAN status missing state field.')
-                    elif network['state'].lower() == STATE_CONNECTED.lower():
+                            "WLAN status missing state field."
+                        )
+                    elif network["state"].lower() == STATE_CONNECTED.lower():
                         return True
             # Wait a bit before requesting another status update
             time.sleep(1)
         # Stopped getting updates because out timeout
-        self.log.error('Timed out waiting for network with SSID "%s" to '
-                       "connect" % ssid)
+        self.log.error(
+            'Timed out waiting for network with SSID "%s" to ' "connect" % ssid
+        )
         return False
 
-    def wait_for_disconnect(self,
-                            ssid,
-                            security_type,
-                            state=None,
-                            status=None,
-                            timeout=30):
-        """ Wait for a disconnect of the specified network on the given device. This
+    def wait_for_disconnect(
+        self, ssid, security_type, state=None, status=None, timeout=30
+    ):
+        """Wait for a disconnect of the specified network on the given device. This
         will check that the correct connection state and disconnect status are
         given in update. If we do not see a disconnect after some time,
         return false.
@@ -443,69 +440,78 @@
         while time.time() < end_time:
             time_left = max(1, int(end_time - time.time()))
             try:
-                update = self.sl4f.wlan_policy_lib.wlanGetUpdate(
-                    timeout=time_left)
+                update = self.sl4f.wlan_policy_lib.wlanGetUpdate(timeout=time_left)
             except TimeoutError:
                 self.log.error(
-                    'Timed out waiting for response from device '
+                    "Timed out waiting for response from device "
                     'while waiting for network with SSID "%s" to '
-                    'disconnect. Device took too long to disconnect '
-                    'or the request timed out for another reason.' % ssid)
+                    "disconnect. Device took too long to disconnect "
+                    "or the request timed out for another reason." % ssid
+                )
                 self.sl4f.wlan_policy_lib.wlanSetNewListener()
                 return False
 
-            if update.get('error'):
+            if update.get("error"):
                 # This can occur for many reasons, so it is not necessarily a
                 # failure.
-                self.log.debug('Error occurred getting status update: %s' %
-                               update['error'])
+                self.log.debug(
+                    "Error occurred getting status update: %s" % update["error"]
+                )
                 continue
             # Update should include network, either connected to or recently disconnected.
-            if len(update['result']['networks']) == 0:
-                raise WlanPolicyControllerError(
-                    'WLAN state update is missing network.')
+            if len(update["result"]["networks"]) == 0:
+                raise WlanPolicyControllerError("WLAN state update is missing network.")
 
-            for network in update['result']['networks']:
-                if network['id']['ssid'] == ssid or network['id'][
-                        'type_'].lower() == security_type.lower():
-                    if 'state' not in network or 'status' not in network:
+            for network in update["result"]["networks"]:
+                if (
+                    network["id"]["ssid"] == ssid
+                    or network["id"]["type_"].lower() == security_type.lower()
+                ):
+                    if "state" not in network or "status" not in network:
                         raise WlanPolicyControllerError(
-                            'Client state summary\'s network is missing fields'
+                            "Client state summary's network is missing fields"
                         )
                     # If still connected, we will wait for another update and check again
-                    elif network['state'].lower() == STATE_CONNECTED.lower():
+                    elif network["state"].lower() == STATE_CONNECTED.lower():
                         continue
-                    elif network['state'].lower() == STATE_CONNECTING.lower():
+                    elif network["state"].lower() == STATE_CONNECTING.lower():
                         self.log.error(
                             'Update is "Connecting", but device should already be '
-                            'connected; expected disconnect')
+                            "connected; expected disconnect"
+                        )
                         return False
                     # Check that the network state and disconnect status are expected, ie
                     # that it isn't ConnectionFailed when we expect ConnectionStopped
-                    elif network['state'].lower() != state.lower(
-                    ) or network['status'].lower() != status.lower():
+                    elif (
+                        network["state"].lower() != state.lower()
+                        or network["status"].lower() != status.lower()
+                    ):
                         self.log.error(
-                            'Connection failed: a network failure occurred that is unrelated'
-                            'to remove network or incorrect status update. \nExpected state: '
-                            '%s, Status: %s,\nActual update: %s' %
-                            (state, status, network))
+                            "Connection failed: a network failure occurred that is unrelated"
+                            "to remove network or incorrect status update. \nExpected state: "
+                            "%s, Status: %s,\nActual update: %s"
+                            % (state, status, network)
+                        )
                         return False
                     else:
                         return True
             # Wait a bit before requesting another status update
             time.sleep(1)
         # Stopped getting updates because out timeout
-        self.log.error('Timed out waiting for network with SSID "%s" to '
-                       'connect' % ssid)
+        self.log.error(
+            'Timed out waiting for network with SSID "%s" to ' "connect" % ssid
+        )
         return False
 
-    def wait_for_no_connections(self, timeout=30):
-        """ Waits to see that there are no existing connections the device. This
+    def wait_for_no_connections(
+        self, timeout_sec: int = DEFAULT_GET_UPDATE_TIMEOUT
+    ) -> bool:
+        """Waits to see that there are no existing connections the device. This
         is the simplest way to watch for disconnections when only a single
         network is saved/present.
 
         Args:
-            timeout: int, time in seconds to wait to see no connections
+            timeout_sec: The time to wait to see no connections.
 
         Returns:
             True, if successful. False, if still connected after timeout.
@@ -514,16 +520,16 @@
         # then an update won't be generated by the device, and we'll time out.
         # Force an update by getting a new listener.
         self.sl4f.wlan_policy_lib.wlanSetNewListener()
-        end_time = time.time() + timeout
+        end_time = time.time() + timeout_sec
         while time.time() < end_time:
             time_left = max(1, int(end_time - time.time()))
             try:
-                update = self.sl4f.wlan_policy_lib.wlanGetUpdate(
-                    timeout=time_left)
+                update = self.sl4f.wlan_policy_lib.wlanGetUpdate(timeout=time_left)
             except TimeoutError:
                 self.log.info(
                     "Timed out getting status update while waiting for all"
-                    " connections to end.")
+                    " connections to end."
+                )
                 self.sl4f.wlan_policy_lib.wlanSetNewListener()
                 return False
 
@@ -532,17 +538,18 @@
                 return False
             # If any network is connected or being connected to, wait for them
             # to disconnect.
-            if any(network['state'].lower() in
-                   {STATE_CONNECTED.lower(),
-                    STATE_CONNECTING.lower()}
-                   for network in update['result']['networks']):
+            if any(
+                network["state"].lower()
+                in {STATE_CONNECTED.lower(), STATE_CONNECTING.lower()}
+                for network in update["result"]["networks"]
+            ):
                 continue
             else:
                 return True
         return False
 
     def remove_and_preserve_networks_and_client_state(self):
-        """ Preserves networks already saved on devices before removing them to
+        """Preserves networks already saved on devices before removing them to
         setup up for a clean test environment. Records the state of client
         connections before tests.
 
@@ -551,47 +558,49 @@
         """
         # Save preexisting saved networks
         preserved_networks_and_state = {}
-        saved_networks_response = self.sl4f.wlan_policy_lib.wlanGetSavedNetworks(
-        )
-        if saved_networks_response.get('error'):
+        saved_networks_response = self.sl4f.wlan_policy_lib.wlanGetSavedNetworks()
+        if saved_networks_response.get("error"):
             raise WlanPolicyControllerError(
-                'Failed to get preexisting saved networks: %s' %
-                saved_networks_response['error'])
-        if saved_networks_response.get('result') != None:
-            preserved_networks_and_state[
-                SAVED_NETWORKS] = saved_networks_response['result']
+                "Failed to get preexisting saved networks: %s"
+                % saved_networks_response["error"]
+            )
+        if saved_networks_response.get("result") != None:
+            preserved_networks_and_state[SAVED_NETWORKS] = saved_networks_response[
+                "result"
+            ]
 
         # Remove preexisting saved networks
         if not self.remove_all_networks():
             raise WlanPolicyControllerError(
-                'Failed to clear networks and disconnect at FuchsiaDevice creation.'
+                "Failed to clear networks and disconnect at FuchsiaDevice creation."
             )
 
         self.sl4f.wlan_policy_lib.wlanSetNewListener()
         update_response = self.sl4f.wlan_policy_lib.wlanGetUpdate()
-        update_result = update_response.get('result', {})
-        if update_result.get('state'):
-            preserved_networks_and_state[CLIENT_STATE] = update_result['state']
+        update_result = update_response.get("result", {})
+        if update_result.get("state"):
+            preserved_networks_and_state[CLIENT_STATE] = update_result["state"]
         else:
-            self.log.warn('Failed to get update; test will not start or '
-                          'stop client connections at the end of the test.')
+            self.log.warn(
+                "Failed to get update; test will not start or "
+                "stop client connections at the end of the test."
+            )
 
-        self.log.info('Saved networks cleared and preserved.')
+        self.log.info("Saved networks cleared and preserved.")
         return preserved_networks_and_state
 
     def restore_preserved_networks_and_client_state(self):
-        """ Restore saved networks and client state onto device if they have
+        """Restore saved networks and client state onto device if they have
         been preserved.
         """
         if not self.remove_all_networks():
-            self.log.warn('Failed to remove saved networks before restore.')
+            self.log.warn("Failed to remove saved networks before restore.")
         restore_success = True
-        for network in self.preserved_networks_and_client_state[
-                SAVED_NETWORKS]:
-            if not self.save_network(network["ssid"], network["security_type"],
-                                     network["credential_value"]):
-                self.log.warn('Failed to restore network (%s).' %
-                              network['ssid'])
+        for network in self.preserved_networks_and_client_state[SAVED_NETWORKS]:
+            if not self.save_network(
+                network["ssid"], network["security_type"], network["credential_value"]
+            ):
+                self.log.warn("Failed to restore network (%s)." % network["ssid"])
                 restore_success = False
         starting_state = self.preserved_networks_and_client_state[CLIENT_STATE]
         if starting_state == CONNECTIONS_ENABLED:
@@ -599,9 +608,9 @@
         else:
             state_restored = self.stop_client_connections()
         if not state_restored:
-            self.log.warn('Failed to restore client connections state.')
+            self.log.warn("Failed to restore client connections state.")
             restore_success = False
         if restore_success:
-            self.log.info('Preserved networks and client state restored.')
+            self.log.info("Preserved networks and client state restored.")
             self.preserved_networks_and_client_state = None
         return restore_success
diff --git a/src/antlion/controllers/fuchsia_lib/location/regulatory_region_lib.py b/src/antlion/controllers/fuchsia_lib/location/regulatory_region_lib.py
index 7b5c692..54d9e44 100644
--- a/src/antlion/controllers/fuchsia_lib/location/regulatory_region_lib.py
+++ b/src/antlion/controllers/fuchsia_lib/location/regulatory_region_lib.py
@@ -18,7 +18,6 @@
 
 
 class FuchsiaRegulatoryRegionLib(BaseLib):
-
     def __init__(self, addr: str) -> None:
         super().__init__(addr, "location_regulatory_region")
 
diff --git a/src/antlion/controllers/fuchsia_lib/logging_lib.py b/src/antlion/controllers/fuchsia_lib/logging_lib.py
index aba1acf..83825c4 100644
--- a/src/antlion/controllers/fuchsia_lib/logging_lib.py
+++ b/src/antlion/controllers/fuchsia_lib/logging_lib.py
@@ -20,7 +20,6 @@
 
 
 class FuchsiaLoggingLib(BaseLib):
-
     def __init__(self, addr: str) -> None:
         super().__init__(addr, "logging")
 
@@ -35,7 +34,7 @@
         """
         test_cmd = "logging_facade.LogErr"
         test_args = {
-            "message": '[%s] %s' % (datetime.datetime.now(), message),
+            "message": "[%s] %s" % (datetime.datetime.now(), message),
         }
 
         return self.send_command(test_cmd, test_args)
@@ -50,7 +49,7 @@
             Dictionary, None if success, error if error.
         """
         test_cmd = "logging_facade.LogInfo"
-        test_args = {"message": '[%s] %s' % (datetime.datetime.now(), message)}
+        test_args = {"message": "[%s] %s" % (datetime.datetime.now(), message)}
 
         return self.send_command(test_cmd, test_args)
 
@@ -64,6 +63,6 @@
             Dictionary, None if success, error if error.
         """
         test_cmd = "logging_facade.LogWarn"
-        test_args = {"message": '[%s] %s' % (datetime.datetime.now(), message)}
+        test_args = {"message": "[%s] %s" % (datetime.datetime.now(), message)}
 
         return self.send_command(test_cmd, test_args)
diff --git a/src/antlion/controllers/fuchsia_lib/netstack/netstack_lib.py b/src/antlion/controllers/fuchsia_lib/netstack/netstack_lib.py
index 20893b2..481e9bd 100644
--- a/src/antlion/controllers/fuchsia_lib/netstack/netstack_lib.py
+++ b/src/antlion/controllers/fuchsia_lib/netstack/netstack_lib.py
@@ -18,7 +18,6 @@
 
 
 class FuchsiaNetstackLib(BaseLib):
-
     def __init__(self, addr: str) -> None:
         super().__init__(addr, "netstack")
 
diff --git a/src/antlion/controllers/fuchsia_lib/package_server.py b/src/antlion/controllers/fuchsia_lib/package_server.py
index b0a45c7..d497e96 100644
--- a/src/antlion/controllers/fuchsia_lib/package_server.py
+++ b/src/antlion/controllers/fuchsia_lib/package_server.py
@@ -31,8 +31,8 @@
 from antlion import signals
 from antlion import utils
 
-from antlion.controllers.fuchsia_lib.ssh import FuchsiaSSHError, SSHProvider
-from antlion.controllers.fuchsia_lib.utils_lib import wait_for_port
+from antlion.controllers.fuchsia_lib.ssh import SSHError, SSHProvider
+from antlion.net import wait_for_port
 from antlion.tracelogger import TraceLogger
 
 DEFAULT_FUCHSIA_REPO_NAME = "fuchsia.com"
@@ -45,13 +45,14 @@
 
 def random_port() -> int:
     s = socket.socket()
-    s.bind(('', 0))
+    s.bind(("", 0))
     return s.getsockname()[1]
 
 
 @dataclass
 class Route:
     """Represent a route in the routing table."""
+
     preferred_source: Optional[str]
 
 
@@ -77,9 +78,9 @@
     Returns:
         Routes with destination to dest_ip.
     """
-    resp = subprocess.run(f"ip -json route get {dest_ip}".split(),
-                          capture_output=True,
-                          check=True)
+    resp = subprocess.run(
+        f"ip -json route get {dest_ip}".split(), capture_output=True, check=True
+    )
     routes = json.loads(resp.stdout)
     return [Route(r.get("prefsrc")) for r in routes]
 
@@ -105,7 +106,8 @@
     routes = find_routes_to(device_ip)
     if len(routes) != 1:
         raise PackageServerError(
-            f"Expected only one route to {device_ip}, got {routes}")
+            f"Expected only one route to {device_ip}, got {routes}"
+        )
 
     route = routes[0]
     if not route.preferred_source:
@@ -150,14 +152,13 @@
         Raises:
             TestAbortClass: when the timestamp.json file has expired
         """
-        with open(f'{self._packages_path}/repository/timestamp.json',
-                  'r') as f:
+        with open(f"{self._packages_path}/repository/timestamp.json", "r") as f:
             data = json.load(f)
             expiresAtRaw = data["signed"]["expires"]
-            expiresAt = datetime.strptime(expiresAtRaw, '%Y-%m-%dT%H:%M:%SZ')
+            expiresAt = datetime.strptime(expiresAtRaw, "%Y-%m-%dT%H:%M:%SZ")
             if expiresAt <= datetime.now():
                 raise signals.TestAbortClass(
-                    f'{self._packages_path}/repository/timestamp.json has expired on {expiresAtRaw}'
+                    f"{self._packages_path}/repository/timestamp.json has expired on {expiresAtRaw}"
                 )
 
     def start(self) -> None:
@@ -171,36 +172,39 @@
             )
             return
 
-        pm_command = f'{self._binary_path} serve -c 2 -repo {self._packages_path} -l :{self._port}'
+        pm_command = f"{self._binary_path} serve -c 2 -repo {self._packages_path} -l :{self._port}"
 
         root_dir = context.get_current_context().get_full_output_path()
         epoch = utils.get_current_epoch_time()
         time_stamp = logger.normalize_log_line_timestamp(
-            logger.epoch_to_log_line_timestamp(epoch))
-        self._log_path = os.path.join(root_dir, f'pm_server.{time_stamp}.log')
+            logger.epoch_to_log_line_timestamp(epoch)
+        )
+        self._log_path = os.path.join(root_dir, f"pm_server.{time_stamp}.log")
 
-        self._server_log = open(self._log_path, 'a+')
-        self._server_proc = subprocess.Popen(pm_command.split(),
-                                             preexec_fn=os.setpgrp,
-                                             stdout=self._server_log,
-                                             stderr=subprocess.STDOUT)
+        self._server_log = open(self._log_path, "a+")
+        self._server_proc = subprocess.Popen(
+            pm_command.split(),
+            preexec_fn=os.setpgrp,
+            stdout=self._server_log,
+            stderr=subprocess.STDOUT,
+        )
         try:
-            wait_for_port('127.0.0.1', self._port)
+            wait_for_port("127.0.0.1", self._port)
         except TimeoutError as e:
             if self._server_log:
                 self._server_log.close()
             if self._log_path:
-                with open(self._log_path, 'r') as f:
+                with open(self._log_path, "r") as f:
                     logs = f.read()
             raise TimeoutError(
                 f"pm serve failed to expose port {self._port}. Logs:\n{logs}"
             ) from e
 
-        self.log.info(f'Serving packages on port {self._port}')
+        self.log.info(f"Serving packages on port {self._port}")
 
-    def configure_device(self,
-                         ssh: SSHProvider,
-                         repo_name=DEFAULT_FUCHSIA_REPO_NAME) -> None:
+    def configure_device(
+        self, ssh: SSHProvider, repo_name=DEFAULT_FUCHSIA_REPO_NAME
+    ) -> None:
         """Configure the device to use this package server.
 
         Args:
@@ -209,16 +213,15 @@
         """
         # Remove any existing repositories that may be stale.
         try:
-            ssh.run(f'pkgctl repo rm fuchsia-pkg://{repo_name}')
-        except FuchsiaSSHError as e:
-            if 'NOT_FOUND' not in e.result.stderr:
+            ssh.run(f"pkgctl repo rm fuchsia-pkg://{repo_name}")
+        except SSHError as e:
+            if "NOT_FOUND" not in e.result.stderr:
                 raise e
 
         # Configure the device with the new repository.
         host_ip = find_host_ip(ssh.config.host_name)
         repo_url = f"http://{host_ip}:{self._port}"
-        ssh.run(
-            f"pkgctl repo add url -f 2 -n {repo_name} {repo_url}/config.json")
+        ssh.run(f"pkgctl repo add url -f 2 -n {repo_name} {repo_url}/config.json")
         self.log.info(
             f'Added repo "{repo_name}" as {repo_url} on device {ssh.config.host_name}'
         )
@@ -227,7 +230,8 @@
         """Stop the package server."""
         if not self._server_proc:
             self.log.warn(
-                "Skipping to stop the server since it hasn't been started yet")
+                "Skipping to stop the server since it hasn't been started yet"
+            )
             return
 
         self._server_proc.terminate()
diff --git a/src/antlion/controllers/fuchsia_lib/sl4f.py b/src/antlion/controllers/fuchsia_lib/sl4f.py
index 1958772..e051d7c 100644
--- a/src/antlion/controllers/fuchsia_lib/sl4f.py
+++ b/src/antlion/controllers/fuchsia_lib/sl4f.py
@@ -18,29 +18,26 @@
 import sys
 
 from antlion import logger
-from antlion.controllers.fuchsia_lib import utils_lib
-from antlion.controllers.fuchsia_lib.audio_lib import FuchsiaAudioLib
-from antlion.controllers.fuchsia_lib.basemgr_lib import FuchsiaBasemgrLib
-from antlion.controllers.fuchsia_lib.bt.avdtp_lib import FuchsiaAvdtpLib
-from antlion.controllers.fuchsia_lib.bt.ble_lib import FuchsiaBleLib
-from antlion.controllers.fuchsia_lib.bt.bts_lib import FuchsiaBtsLib
-from antlion.controllers.fuchsia_lib.bt.gattc_lib import FuchsiaGattcLib
-from antlion.controllers.fuchsia_lib.bt.gatts_lib import FuchsiaGattsLib
-from antlion.controllers.fuchsia_lib.bt.hfp_lib import FuchsiaHfpLib
-from antlion.controllers.fuchsia_lib.bt.rfcomm_lib import FuchsiaRfcommLib
-from antlion.controllers.fuchsia_lib.bt.sdp_lib import FuchsiaProfileServerLib
-from antlion.controllers.fuchsia_lib.hardware_power_statecontrol_lib import FuchsiaHardwarePowerStatecontrolLib
-from antlion.controllers.fuchsia_lib.location.regulatory_region_lib import FuchsiaRegulatoryRegionLib
+from antlion.controllers.fuchsia_lib.device_lib import DeviceLib
+from antlion.controllers.fuchsia_lib.hardware_power_statecontrol_lib import (
+    FuchsiaHardwarePowerStatecontrolLib,
+)
+from antlion.controllers.fuchsia_lib.location.regulatory_region_lib import (
+    FuchsiaRegulatoryRegionLib,
+)
 from antlion.controllers.fuchsia_lib.logging_lib import FuchsiaLoggingLib
 from antlion.controllers.fuchsia_lib.netstack.netstack_lib import FuchsiaNetstackLib
-from antlion.controllers.fuchsia_lib.ssh import SSHProvider, FuchsiaSSHError
+from antlion.controllers.fuchsia_lib.ssh import SSHProvider, SSHError
 from antlion.controllers.fuchsia_lib.wlan_ap_policy_lib import FuchsiaWlanApPolicyLib
-from antlion.controllers.fuchsia_lib.wlan_deprecated_configuration_lib import FuchsiaWlanDeprecatedConfigurationLib
+from antlion.controllers.fuchsia_lib.wlan_deprecated_configuration_lib import (
+    FuchsiaWlanDeprecatedConfigurationLib,
+)
 from antlion.controllers.fuchsia_lib.wlan_lib import FuchsiaWlanLib
 from antlion.controllers.fuchsia_lib.wlan_policy_lib import FuchsiaWlanPolicyLib
+from antlion.net import wait_for_port
 
 DEFAULT_SL4F_PORT = 80
-START_SL4F_V2_CMD = 'start_sl4f'
+START_SL4F_V2_CMD = "start_sl4f"
 
 
 class SL4F:
@@ -52,78 +49,65 @@
         log: Logger for the device-specific instance of SL4F.
     """
 
-    def __init__(self, ssh: SSHProvider,
-                 port: int = DEFAULT_SL4F_PORT) -> None:
+    def __init__(self, ssh: SSHProvider, port: int = DEFAULT_SL4F_PORT) -> None:
         """
         Args:
             ssh: SSHProvider transport to start and stop SL4F.
             port: Port for the SL4F server to listen on.
         """
+        host = ssh.config.host_name
+
         if sys.version_info < (3, 9):
             # TODO(http://b/261746355): Remove this if statement once the
             # minimum Python version is 3.9 or newer.
-            host = ipaddress.ip_address(ssh.config.host_name.split('%')[0])
-            if host.version == 4:
-                self.address = f'http://{host}:{port}'
-            elif host.version == 6:
-                host = ssh.config.host_name
-                self.address = f'http://[{host}]:{port}'
+            ip = ipaddress.ip_address(host.split("%")[0])
+            if ip.version == 4:
+                self.address = f"http://{ip}:{port}"
+            elif ip.version == 6:
+                ip = ssh.config.host_name
+                self.address = f"http://[{ip}]:{port}"
         else:
-            host = ipaddress.ip_address(ssh.config.host_name)
-            if host.version == 4:
-                self.address = f'http://{host}:{port}'
-            elif host.version == 6:
-                self.address = f'http://[{host}]:{port}'
+            ip = ipaddress.ip_address(host)
+            if ip.version == 4:
+                self.address = f"http://{ip}:{port}"
+            elif ip.version == 6:
+                self.address = f"http://[{ip}]:{port}"
 
         self.log = logger.create_tagged_trace_logger(f"SL4F | {self.address}")
 
         try:
+            ssh.stop_component("sl4f")
             ssh.run(START_SL4F_V2_CMD).stdout
-        except FuchsiaSSHError:
+        except SSHError:
             # TODO(fxbug.dev/99331) Remove support to run SL4F in CFv1 mode
             # once ACTS no longer use images that comes with only CFv1 SL4F.
             self.log.warn(
                 "Running SL4F in CFv1 mode, "
                 "this is deprecated for images built after 5/9/2022, "
-                "see https://fxbug.dev/77056 for more info.")
-            ssh.stop_v1_component("sl4f")
+                "see https://fxbug.dev/77056 for more info."
+            )
+            ssh.stop_component("sl4f")
             ssh.start_v1_component("sl4f")
 
-        utils_lib.wait_for_port(str(host), port)
+        try:
+            wait_for_port(host, port)
+            self.log.info("SL4F server is reachable")
+        except TimeoutError as e:
+            raise TimeoutError("SL4F server is unreachable") from e
+
         self._init_libraries()
-        self._verify_sl4f_connection()
 
     def _init_libraries(self) -> None:
-        # Grab commands from FuchsiaAudioLib
-        self.audio_lib = FuchsiaAudioLib(self.address)
-
-        # Grab commands from FuchsiaAvdtpLib
-        self.avdtp_lib = FuchsiaAvdtpLib(self.address)
-
-        # Grab commands from FuchsiaHfpLib
-        self.hfp_lib = FuchsiaHfpLib(self.address)
-
-        # Grab commands from FuchsiaRfcommLib
-        self.rfcomm_lib = FuchsiaRfcommLib(self.address)
-
-        # Grab commands from FuchsiaBasemgrLib
-        self.basemgr_lib = FuchsiaBasemgrLib(self.address)
-
-        # Grab commands from FuchsiaBleLib
-        self.ble_lib = FuchsiaBleLib(self.address)
-
-        # Grab commands from FuchsiaBtsLib
-        self.bts_lib = FuchsiaBtsLib(self.address)
-
-        # Grab commands from FuchsiaGattcLib
-        self.gattc_lib = FuchsiaGattcLib(self.address)
-
-        # Grab commands from FuchsiaGattsLib
-        self.gatts_lib = FuchsiaGattsLib(self.address)
+        # Grab commands from DeviceLib
+        self.device_lib = DeviceLib(self.address)
 
         # Grab commands from FuchsiaHardwarePowerStatecontrolLib
-        self.hardware_power_statecontrol_lib = (
-            FuchsiaHardwarePowerStatecontrolLib(self.address))
+        self.hardware_power_statecontrol_lib = FuchsiaHardwarePowerStatecontrolLib(
+            self.address
+        )
+
+        # Grab commands from FuchsiaRegulatoryRegionLib
+        self.regulatory_region_lib = FuchsiaRegulatoryRegionLib(self.address)
 
         # Grab commands from FuchsiaLoggingLib
         self.logging_lib = FuchsiaLoggingLib(self.address)
@@ -131,31 +115,16 @@
         # Grab commands from FuchsiaNetstackLib
         self.netstack_lib = FuchsiaNetstackLib(self.address)
 
-        # Grab commands from FuchsiaProfileServerLib
-        self.sdp_lib = FuchsiaProfileServerLib(self.address)
-
-        # Grab commands from FuchsiaRegulatoryRegionLib
-        self.regulatory_region_lib = FuchsiaRegulatoryRegionLib(self.address)
+        # Grab commands from FuchsiaWlanApPolicyLib
+        self.wlan_ap_policy_lib = FuchsiaWlanApPolicyLib(self.address)
 
         # Grabs command from FuchsiaWlanDeprecatedConfigurationLib
-        self.wlan_deprecated_configuration_lib = (
-            FuchsiaWlanDeprecatedConfigurationLib(self.address))
+        self.wlan_deprecated_configuration_lib = FuchsiaWlanDeprecatedConfigurationLib(
+            self.address
+        )
 
         # Grab commands from FuchsiaWlanLib
         self.wlan_lib = FuchsiaWlanLib(self.address)
 
-        # Grab commands from FuchsiaWlanApPolicyLib
-        self.wlan_ap_policy_lib = FuchsiaWlanApPolicyLib(self.address)
-
         # Grab commands from FuchsiaWlanPolicyLib
         self.wlan_policy_lib = FuchsiaWlanPolicyLib(self.address)
-
-    def _verify_sl4f_connection(self) -> None:
-        """Verify SL4F commands can run on server."""
-
-        self.log.info('Verifying SL4F commands can run.')
-        try:
-            self.wlan_lib.wlanGetIfaceIdList()
-        except Exception as err:
-            raise ConnectionError(
-                f'Failed to connect and run command via SL4F. Err: {err}')
diff --git a/src/antlion/controllers/fuchsia_lib/ssh.py b/src/antlion/controllers/fuchsia_lib/ssh.py
index ec8f762..1d1f421 100644
--- a/src/antlion/controllers/fuchsia_lib/ssh.py
+++ b/src/antlion/controllers/fuchsia_lib/ssh.py
@@ -14,225 +14,25 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import subprocess
 import time
 
-from dataclasses import dataclass
-from typing import List, Union
-
-from antlion import logger
-from antlion import signals
+from antlion.capabilities.ssh import SSHError, SSHProvider
 
 DEFAULT_SSH_USER: str = "fuchsia"
-DEFAULT_SSH_PORT: int = 22
 DEFAULT_SSH_PRIVATE_KEY: str = "~/.ssh/fuchsia_ed25519"
-DEFAULT_SSH_TIMEOUT_SEC: int = 60
-DEFAULT_SSH_CONNECT_TIMEOUT_SEC: int = 30
-DEFAULT_SSH_SERVER_ALIVE_INTERVAL: int = 30
 # The default package repository for all components.
-FUCHSIA_PACKAGE_REPO_NAME = 'fuchsia.com'
+FUCHSIA_PACKAGE_REPO_NAME = "fuchsia.com"
 
 
-class SSHResult:
-    """Result of an SSH command."""
-
-    def __init__(
-        self, process: Union[subprocess.CompletedProcess,
-                             subprocess.CalledProcessError]
-    ) -> None:
-        self._raw_stdout = process.stdout
-        self._stdout = process.stdout.decode('utf-8', errors='replace')
-        self._stderr = process.stderr.decode('utf-8', errors='replace')
-        self._exit_status: int = process.returncode
-
-    def __str__(self):
-        if self.exit_status == 0:
-            return self.stdout
-        return f'status {self.exit_status}, stdout: "{self.stdout}", stderr: "{self.stderr}"'
-
-    @property
-    def stdout(self) -> str:
-        return self._stdout
-
-    @property
-    def stderr(self) -> str:
-        return self._stderr
-
-    @property
-    def exit_status(self) -> int:
-        return self._exit_status
-
-    @property
-    def raw_stdout(self) -> bytes:
-        return self._raw_stdout
-
-
-class FuchsiaSSHError(signals.TestError):
-    """A SSH command returned with a non-zero status code."""
-
-    def __init__(self, command: str, result: SSHResult):
-        super().__init__(
-            f'SSH command "{command}" unexpectedly returned {result}')
-        self.result = result
-
-
-class SSHTimeout(signals.TestError):
-    """A SSH command timed out."""
-
-    def __init__(self, err: subprocess.TimeoutExpired):
-        super().__init__(
-            f'SSH command "{err.cmd}" timed out after {err.timeout}s, '
-            f'stdout="{err.stdout}", stderr="{err.stderr}"')
-
-
-class FuchsiaSSHTransportError(signals.TestError):
-    """Failure to send an SSH command."""
-
-
-@dataclass
-class SSHConfig:
-    """SSH client config."""
-
-    # SSH flags. See ssh(1) for full details.
-    host_name: str
-    identity_file: str
-
-    ssh_binary: str = 'ssh'
-    config_file: str = '/dev/null'
-    port: int = 22
-    user: str = DEFAULT_SSH_USER
-
-    # SSH options. See ssh_config(5) for full details.
-    connect_timeout: int = DEFAULT_SSH_CONNECT_TIMEOUT_SEC
-    server_alive_interval: int = DEFAULT_SSH_SERVER_ALIVE_INTERVAL
-    strict_host_key_checking: bool = False
-    user_known_hosts_file: str = "/dev/null"
-    log_level: str = "ERROR"
-
-    def full_command(self, command: str, force_tty: bool = False) -> List[str]:
-        """Generate the complete command to execute command over SSH.
-
-        Args:
-            command: The command to run over SSH
-            force_tty: Force pseudo-terminal allocation. This can be used to
-                execute arbitrary screen-based programs on a remote machine,
-                which can be very useful, e.g. when implementing menu services.
-
-        Returns:
-            Arguments composing the complete call to SSH.
-        """
-        optional_flags = []
-        if force_tty:
-            # Multiple -t options force tty allocation, even if ssh has no local
-            # tty. This is necessary for launching ssh with subprocess without
-            # shell=True.
-            optional_flags.append('-tt')
-
-        return [
-            self.ssh_binary,
-            # SSH flags
-            '-i',
-            self.identity_file,
-            '-F',
-            self.config_file,
-            '-p',
-            str(self.port),
-            # SSH configuration options
-            '-o',
-            f'ConnectTimeout={self.connect_timeout}',
-            '-o',
-            f'ServerAliveInterval={self.server_alive_interval}',
-            '-o',
-            f'StrictHostKeyChecking={"yes" if self.strict_host_key_checking else "no"}',
-            '-o',
-            f'UserKnownHostsFile={self.user_known_hosts_file}',
-            '-o',
-            f'LogLevel={self.log_level}',
-        ] + optional_flags + [
-            f'{self.user}@{self.host_name}'
-        ] + command.split()
-
-
-class SSHProvider:
+class FuchsiaSSHProvider(SSHProvider):
     """Device-specific provider for SSH clients."""
 
-    def __init__(self, config: SSHConfig) -> None:
-        """
-        Args:
-            config: SSH client config
-        """
-        logger_tag = f"ssh | {config.host_name}"
-        if config.port != DEFAULT_SSH_PORT:
-            logger_tag += f':{config.port}'
-
-        # Check if the private key exists
-
-        self.log = logger.create_tagged_trace_logger(logger_tag)
-        self.config = config
-
-    def run(self,
-            command: str,
-            timeout_sec: int = DEFAULT_SSH_TIMEOUT_SEC,
-            connect_retries: int = 3,
-            force_tty: bool = False) -> SSHResult:
-        """Run a command on the device then exit.
-
-        Args:
-            command: String to send to the device.
-            timeout_sec: Seconds to wait for the command to complete.
-            connect_retries: Amount of times to retry connect on fail.
-            force_tty: Force pseudo-terminal allocation.
-
-        Raises:
-            FuchsiaSSHError: if the SSH command returns a non-zero status code
-            FuchsiaSSHTimeout: if there is no response within timeout_sec
-            FuchsiaSSHTransportError: if SSH fails to run the command
-
-        Returns:
-            SSHResults from the executed command.
-        """
-        err: Exception
-        for i in range(0, connect_retries):
-            try:
-                return self._run(command, timeout_sec, force_tty)
-            except FuchsiaSSHTransportError as e:
-                err = e
-                self.log.warn(f'Connect failed: {e}')
-        raise err
-
-    def _run(self, command: str, timeout_sec: int, force_tty: bool) -> SSHResult:
-        full_command = self.config.full_command(command, force_tty)
-        self.log.debug(f'Running "{" ".join(full_command)}"')
-        try:
-            process = subprocess.run(full_command,
-                                     capture_output=True,
-                                     timeout=timeout_sec,
-                                     check=True)
-        except subprocess.CalledProcessError as e:
-            if e.returncode == 255:
-                stderr = e.stderr.decode('utf-8', errors='replace')
-                if 'Name or service not known' in stderr or 'Host does not exist' in stderr:
-                    raise FuchsiaSSHTransportError(
-                        f'Hostname {self.config.host_name} cannot be resolved to an address'
-                    ) from e
-                if 'Connection timed out' in stderr:
-                    raise FuchsiaSSHTransportError(
-                        f'Failed to establish a connection to {self.config.host_name} within {timeout_sec}s'
-                    ) from e
-                if 'Connection refused' in stderr:
-                    raise FuchsiaSSHTransportError(
-                        f'Connection refused by {self.config.host_name}') from e
-
-            raise FuchsiaSSHError(command, SSHResult(e)) from e
-        except subprocess.TimeoutExpired as e:
-            raise SSHTimeout(e) from e
-
-        return SSHResult(process)
-
-    def start_v1_component(self,
-                           component: str,
-                           timeout_sec: int = 5,
-                           repo: str = FUCHSIA_PACKAGE_REPO_NAME) -> None:
+    def start_v1_component(
+        self,
+        component: str,
+        timeout_sec: int = 5,
+        repo: str = FUCHSIA_PACKAGE_REPO_NAME,
+    ) -> None:
         """Start a CFv1 component in the background.
 
         Args:
@@ -246,26 +46,34 @@
         # The "run -d" command will hang when executed without a pseudo-tty
         # allocated.
         self.run(
-            f'run -d fuchsia-pkg://{repo}/{component}#meta/{component}.cmx', force_tty=True)
+            f"run -d fuchsia-pkg://{repo}/{component}#meta/{component}.cmx",
+            force_tty=True,
+        )
 
         timeout = time.perf_counter() + timeout_sec
         while True:
             ps_cmd = self.run("ps")
-            if f'{component}.cmx' in ps_cmd.stdout:
+            if f"{component}.cmx" in ps_cmd.stdout:
                 return
             if time.perf_counter() > timeout:
                 raise TimeoutError(
-                    f'Failed to start "{component}.cmx" after {timeout_sec}s')
+                    f'Failed to start "{component}.cmx" after {timeout_sec}s'
+                )
 
-    def stop_v1_component(self, component: str) -> None:
-        """Stop all instances of a CFv1 component.
+    def stop_component(self, component: str, is_cfv2_component: bool = False) -> None:
+        """Stop all instances of a CFv1 or CFv2 component.
 
         Args:
-            component: Name of the component without ".cmx"
+            component: Name of the component without suffix ("cm" or "cmx").
+            is_cfv2_component: Determines the component suffix to use.
         """
+        suffix = "cm" if is_cfv2_component else "cmx"
+
         try:
-            self.run(f'killall {component}.cmx')
-        except FuchsiaSSHError as e:
-            if 'no tasks found' in e.result.stderr:
+            self.run(f"killall {component}.{suffix}")
+            self.log.info(f"Stopped component: {component}.{suffix}")
+        except SSHError as e:
+            if "no tasks found" in e.result.stderr:
+                self.log.debug(f"Could not find component: {component}.{suffix}")
                 return
             raise e
diff --git a/src/antlion/controllers/fuchsia_lib/utils_lib.py b/src/antlion/controllers/fuchsia_lib/utils_lib.py
index 897749f..1e1336a 100644
--- a/src/antlion/controllers/fuchsia_lib/utils_lib.py
+++ b/src/antlion/controllers/fuchsia_lib/utils_lib.py
@@ -16,18 +16,17 @@
 
 import os
 import logging
-import socket
 import tarfile
 import tempfile
 import time
 
 from antlion import utils
-from antlion.controllers.fuchsia_lib.ssh import FuchsiaSSHError
 from antlion.libs.proc import job
 from antlion.utils import get_fuchsia_mdns_ipv6_address
 
 MDNS_LOOKUP_RETRY_MAX = 3
 FASTBOOT_TIMEOUT = 30
+FLASH_TIMEOUT_SEC = 60 * 5  # 5 minutes
 AFTER_FLASH_BOOT_TIME = 30
 WAIT_FOR_EXISTING_FLASH_TO_FINISH_SEC = 360
 PROCESS_CHECK_WAIT_TIME_SEC = 30
@@ -36,8 +35,7 @@
 FUCHSIA_RELEASE_TESTING_URL = "gs://fuchsia-release-testing/images"
 
 
-def flash(fuchsia_device, use_ssh=False,
-          fuchsia_reconnect_after_reboot_time=5):
+def flash(fuchsia_device, use_ssh=False, fuchsia_reconnect_after_reboot_time=5):
     """A function to flash, not pave, a fuchsia_device
 
     Args:
@@ -47,24 +45,27 @@
         True if successful.
     """
     if not fuchsia_device.authorized_file:
-        raise ValueError('A ssh authorized_file must be present in the '
-                         'ACTS config to flash fuchsia_devices.')
+        raise ValueError(
+            "A ssh authorized_file must be present in the "
+            "ACTS config to flash fuchsia_devices."
+        )
     # This is the product type from the fx set command.
     # Do 'fx list-products' to see options in Fuchsia source tree.
     if not fuchsia_device.product_type:
-        raise ValueError('A product type must be specified to flash '
-                         'fuchsia_devices.')
+        raise ValueError(
+            "A product type must be specified to flash " "fuchsia_devices."
+        )
     # This is the board type from the fx set command.
     # Do 'fx list-boards' to see options in Fuchsia source tree.
     if not fuchsia_device.board_type:
-        raise ValueError('A board type must be specified to flash '
-                         'fuchsia_devices.')
+        raise ValueError("A board type must be specified to flash " "fuchsia_devices.")
     if not fuchsia_device.build_number:
-        fuchsia_device.build_number = 'LATEST'
+        fuchsia_device.build_number = "LATEST"
     if not fuchsia_device.mdns_name:
         raise ValueError(
-            'Either fuchsia_device mdns_name must be specified or '
-            'ip must be the mDNS name to be able to flash.')
+            "Either fuchsia_device mdns_name must be specified or "
+            "ip must be the mDNS name to be able to flash."
+        )
 
     file_to_download = None
     image_archive_path = None
@@ -73,34 +74,35 @@
     if not fuchsia_device.specific_image:
         product_build = fuchsia_device.product_type
         if fuchsia_device.build_type:
-            product_build = f'{product_build}_{fuchsia_device.build_type}'
-        if 'LATEST' in fuchsia_device.build_number:
-            sdk_version = 'sdk'
-            if 'LATEST_F' in fuchsia_device.build_number:
-                f_branch = fuchsia_device.build_number.split('LATEST_F', 1)[1]
-                sdk_version = f'f{f_branch}_sdk'
+            product_build = f"{product_build}_{fuchsia_device.build_type}"
+        if "LATEST" in fuchsia_device.build_number:
+            sdk_version = "sdk"
+            if "LATEST_F" in fuchsia_device.build_number:
+                f_branch = fuchsia_device.build_number.split("LATEST_F", 1)[1]
+                sdk_version = f"f{f_branch}_sdk"
             file_to_download = (
-                f'{FUCHSIA_RELEASE_TESTING_URL}/'
-                f'{sdk_version}-{product_build}.{fuchsia_device.board_type}-release.tgz'
+                f"{FUCHSIA_RELEASE_TESTING_URL}/"
+                f"{sdk_version}-{product_build}.{fuchsia_device.board_type}-release.tgz"
             )
         else:
             # Must be a fully qualified build number (e.g. 5.20210721.4.1215)
             file_to_download = (
-                f'{FUCHSIA_SDK_URL}/{fuchsia_device.build_number}/images/'
-                f'{product_build}.{fuchsia_device.board_type}-release.tgz')
-    elif 'gs://' in fuchsia_device.specific_image:
+                f"{FUCHSIA_SDK_URL}/{fuchsia_device.build_number}/images/"
+                f"{product_build}.{fuchsia_device.board_type}-release.tgz"
+            )
+    elif "gs://" in fuchsia_device.specific_image:
         file_to_download = fuchsia_device.specific_image
     elif os.path.isdir(fuchsia_device.specific_image):
         image_path = fuchsia_device.specific_image
     elif tarfile.is_tarfile(fuchsia_device.specific_image):
         image_archive_path = fuchsia_device.specific_image
     else:
-        raise ValueError(
-            f'Invalid specific_image "{fuchsia_device.specific_image}"')
+        raise ValueError(f'Invalid specific_image "{fuchsia_device.specific_image}"')
 
     if image_path:
-        reboot_to_bootloader(fuchsia_device, use_ssh,
-                             fuchsia_reconnect_after_reboot_time)
+        reboot_to_bootloader(
+            fuchsia_device, use_ssh, fuchsia_reconnect_after_reboot_time
+        )
         logging.info(
             f'Flashing {fuchsia_device.mdns_name} with {image_path} using authorized keys "{fuchsia_device.authorized_file}".'
         )
@@ -109,18 +111,19 @@
         suffix = fuchsia_device.board_type
         with tempfile.TemporaryDirectory(suffix=suffix) as image_path:
             if file_to_download:
-                logging.info(f'Downloading {file_to_download} to {image_path}')
-                job.run(f'gsutil cp {file_to_download} {image_path}')
+                logging.info(f"Downloading {file_to_download} to {image_path}")
+                job.run(f"gsutil cp {file_to_download} {image_path}")
                 image_archive_path = os.path.join(
-                    image_path, os.path.basename(file_to_download))
+                    image_path, os.path.basename(file_to_download)
+                )
 
             if image_archive_path:
                 # Use tar command instead of tarfile.extractall, as it takes too long.
-                job.run(f'tar xfvz {image_archive_path} -C {image_path}',
-                        timeout=120)
+                job.run(f"tar xfvz {image_archive_path} -C {image_path}", timeout=120)
 
-            reboot_to_bootloader(fuchsia_device, use_ssh,
-                                 fuchsia_reconnect_after_reboot_time)
+            reboot_to_bootloader(
+                fuchsia_device, use_ssh, fuchsia_reconnect_after_reboot_time
+            )
 
             logging.info(
                 f'Flashing {fuchsia_device.mdns_name} with {image_archive_path} using authorized keys "{fuchsia_device.authorized_file}".'
@@ -129,15 +132,15 @@
     return True
 
 
-def reboot_to_bootloader(fuchsia_device,
-                         use_ssh=False,
-                         fuchsia_reconnect_after_reboot_time=5):
+def reboot_to_bootloader(
+    fuchsia_device, use_ssh=False, fuchsia_reconnect_after_reboot_time=5
+):
     import psutil
     import usbinfo
+    from antlion.controllers.fuchsia_lib.ssh import SSHError
 
     if use_ssh:
-        logging.info('Sending reboot command via SSH to '
-                     'get into bootloader.')
+        logging.info("Sending reboot command via SSH to " "get into bootloader.")
         # Sending this command will put the device in fastboot
         # but it does not guarantee the device will be in fastboot
         # after this command.  There is no check so if there is an
@@ -145,9 +148,10 @@
         # other check needs to be done.
         try:
             fuchsia_device.ssh.run(
-                'dm rb', timeout_sec=fuchsia_reconnect_after_reboot_time)
-        except FuchsiaSSHError as e:
-            if 'closed by remote host' not in e.result.stderr:
+                "dm rb", timeout_sec=fuchsia_reconnect_after_reboot_time
+            )
+        except SSHError as e:
+            if "closed by remote host" not in e.result.stderr:
                 raise e
     else:
         pass
@@ -155,25 +159,35 @@
 
     time_counter = 0
     while time_counter < FASTBOOT_TIMEOUT:
-        logging.info('Checking to see if fuchsia_device(%s) SN: %s is in '
-                     'fastboot. (Attempt #%s Timeout: %s)' %
-                     (fuchsia_device.mdns_name, fuchsia_device.serial_number,
-                      str(time_counter + 1), FASTBOOT_TIMEOUT))
+        logging.info(
+            "Checking to see if fuchsia_device(%s) SN: %s is in "
+            "fastboot. (Attempt #%s Timeout: %s)"
+            % (
+                fuchsia_device.mdns_name,
+                fuchsia_device.serial_number,
+                str(time_counter + 1),
+                FASTBOOT_TIMEOUT,
+            )
+        )
         for usb_device in usbinfo.usbinfo():
-            if (usb_device['iSerialNumber'] == fuchsia_device.serial_number
-                    and usb_device['iProduct'] == 'USB_download_gadget'):
+            if (
+                usb_device["iSerialNumber"] == fuchsia_device.serial_number
+                and usb_device["iProduct"] == "USB_download_gadget"
+            ):
                 logging.info(
-                    'fuchsia_device(%s) SN: %s is in fastboot.' %
-                    (fuchsia_device.mdns_name, fuchsia_device.serial_number))
+                    "fuchsia_device(%s) SN: %s is in fastboot."
+                    % (fuchsia_device.mdns_name, fuchsia_device.serial_number)
+                )
                 time_counter = FASTBOOT_TIMEOUT
         time_counter = time_counter + 1
         if time_counter == FASTBOOT_TIMEOUT:
             for fail_usb_device in usbinfo.usbinfo():
                 logging.debug(fail_usb_device)
             raise TimeoutError(
-                'fuchsia_device(%s) SN: %s '
-                'never went into fastboot' %
-                (fuchsia_device.mdns_name, fuchsia_device.serial_number))
+                "fuchsia_device(%s) SN: %s "
+                "never went into fastboot"
+                % (fuchsia_device.mdns_name, fuchsia_device.serial_number)
+            )
         time.sleep(1)
 
     end_time = time.time() + WAIT_FOR_EXISTING_FLASH_TO_FINISH_SEC
@@ -182,8 +196,7 @@
         flash_process_found = False
         for proc in psutil.process_iter():
             if "bash" in proc.name() and "flash.sh" in proc.cmdline():
-                logging.info(
-                    "Waiting for existing flash.sh process to complete.")
+                logging.info("Waiting for existing flash.sh process to complete.")
                 time.sleep(PROCESS_CHECK_WAIT_TIME_SEC)
                 flash_process_found = True
         if not flash_process_found:
@@ -193,16 +206,19 @@
 def run_flash_script(fuchsia_device, flash_dir):
     try:
         flash_output = job.run(
-            f'bash {flash_dir}/flash.sh --ssh-key={fuchsia_device.authorized_file} -s {fuchsia_device.serial_number}',
-            timeout=120)
+            f"bash {flash_dir}/flash.sh --ssh-key={fuchsia_device.authorized_file} -s {fuchsia_device.serial_number}",
+            timeout=FLASH_TIMEOUT_SEC,
+        )
         logging.debug(flash_output.stderr)
     except job.TimeoutError as err:
         raise TimeoutError(err)
 
-    logging.info('Waiting %s seconds for device'
-                 ' to come back up after flashing.' % AFTER_FLASH_BOOT_TIME)
+    logging.info(
+        "Waiting %s seconds for device"
+        " to come back up after flashing." % AFTER_FLASH_BOOT_TIME
+    )
     time.sleep(AFTER_FLASH_BOOT_TIME)
-    logging.info('Updating device to new IP addresses.')
+    logging.info("Updating device to new IP addresses.")
     mdns_ip = None
     for retry_counter in range(MDNS_LOOKUP_RETRY_MAX):
         mdns_ip = get_fuchsia_mdns_ipv6_address(fuchsia_device.mdns_name)
@@ -211,38 +227,13 @@
         else:
             time.sleep(1)
     if mdns_ip and utils.is_valid_ipv6_address(mdns_ip):
-        logging.info('IP for fuchsia_device(%s) changed from %s to %s' %
-                     (fuchsia_device.mdns_name, fuchsia_device.ip, mdns_ip))
+        logging.info(
+            "IP for fuchsia_device(%s) changed from %s to %s"
+            % (fuchsia_device.mdns_name, fuchsia_device.ip, mdns_ip)
+        )
         fuchsia_device.ip = mdns_ip
         fuchsia_device.address = "http://[{}]:{}".format(
-            fuchsia_device.ip, fuchsia_device.sl4f_port)
+            fuchsia_device.ip, fuchsia_device.sl4f_port
+        )
     else:
-        raise ValueError('Invalid IP: %s after flashing.' %
-                         fuchsia_device.mdns_name)
-
-
-def wait_for_port(host: str, port: int, timeout_sec: int = 5) -> None:
-    """Wait for the host to start accepting connections on the port.
-
-    Some services take some time to start. Call this after launching the service
-    to avoid race conditions.
-
-    Args:
-        host: IP of the running service.
-        port: Port of the running service.
-        timeout_sec: Seconds to wait until raising TimeoutError
-
-    Raises:
-        TimeoutError: when timeout_sec has expired without a successful
-            connection to the service
-    """
-    timeout = time.perf_counter() + timeout_sec
-    while True:
-        try:
-            with socket.create_connection((host, port), timeout=timeout_sec):
-                return
-        except ConnectionRefusedError as e:
-            if time.perf_counter() > timeout:
-                raise TimeoutError(
-                    f'Waited over {timeout_sec}s for the service to start '
-                    f'accepting connections at {host}:{port}') from e
+        raise ValueError("Invalid IP: %s after flashing." % fuchsia_device.mdns_name)
diff --git a/src/antlion/controllers/fuchsia_lib/wlan_ap_policy_lib.py b/src/antlion/controllers/fuchsia_lib/wlan_ap_policy_lib.py
index 07ff2f8..54486d9 100644
--- a/src/antlion/controllers/fuchsia_lib/wlan_ap_policy_lib.py
+++ b/src/antlion/controllers/fuchsia_lib/wlan_ap_policy_lib.py
@@ -18,26 +18,26 @@
 
 
 class FuchsiaWlanApPolicyLib(BaseLib):
-
     def __init__(self, addr: str) -> None:
         super().__init__(addr, "wlan_ap_policy")
 
-    def wlanStartAccessPoint(self, target_ssid, security_type, target_pwd,
-                             connectivity_mode, operating_band):
-        """ Start an Access Point.
-                Args:
-                    target_ssid: the network to attempt a connection to
-                    security_type: the security protocol of the network. Possible inputs:
-                    	"none", "wep", "wpa", "wpa2", "wpa3"
-                    target_pwd: (optional) credential being saved with the network. No password
-                                is equivalent to empty string.
-                    connectivity_mode: the connectivity mode to use. Possible inputs:
-                    	"local_only", "unrestricted"
-                    operating_band: The operating band to use. Possible inputs:
-                    	"any", "only_2_4_ghz", "only_5_ghz"
+    def wlanStartAccessPoint(
+        self, target_ssid, security_type, target_pwd, connectivity_mode, operating_band
+    ):
+        """Start an Access Point.
+        Args:
+            target_ssid: the network to attempt a connection to
+            security_type: the security protocol of the network. Possible inputs:
+                "none", "wep", "wpa", "wpa2", "wpa3"
+            target_pwd: (optional) credential being saved with the network. No password
+                        is equivalent to empty string.
+            connectivity_mode: the connectivity mode to use. Possible inputs:
+                "local_only", "unrestricted"
+            operating_band: The operating band to use. Possible inputs:
+                "any", "only_2_4_ghz", "only_5_ghz"
 
-                Returns:
-                    boolean indicating if the action was successful
+        Returns:
+            boolean indicating if the action was successful
         """
 
         test_cmd = "wlan_ap_policy.start_access_point"
@@ -53,15 +53,15 @@
         return self.send_command(test_cmd, test_args)
 
     def wlanStopAccessPoint(self, target_ssid, security_type, target_pwd=""):
-        """ Stops an active Access Point.
-                Args:
-                    target_ssid: the network to attempt a connection to
-                    security_type: the security protocol of the network
-                    target_pwd: (optional) credential being saved with the network. No password
-                                is equivalent to empty string.
+        """Stops an active Access Point.
+        Args:
+            target_ssid: the network to attempt a connection to
+            security_type: the security protocol of the network
+            target_pwd: (optional) credential being saved with the network. No password
+                        is equivalent to empty string.
 
-                Returns:
-                    boolean indicating if the action was successful
+        Returns:
+            boolean indicating if the action was successful
         """
 
         test_cmd = "wlan_ap_policy.stop_access_point"
@@ -69,16 +69,16 @@
         test_args = {
             "target_ssid": target_ssid,
             "security_type": security_type.lower(),
-            "target_pwd": target_pwd
+            "target_pwd": target_pwd,
         }
 
         return self.send_command(test_cmd, test_args)
 
     def wlanStopAllAccessPoint(self):
-        """ Stops all Access Points
+        """Stops all Access Points
 
-                Returns:
-                    boolean indicating if the actions were successful
+        Returns:
+            boolean indicating if the actions were successful
         """
 
         test_cmd = "wlan_ap_policy.stop_all_access_points"
@@ -88,22 +88,22 @@
         return self.send_command(test_cmd, test_args)
 
     def wlanSetNewListener(self):
-        """ Sets the update listener stream of the facade to a new stream so that updates will be
-            reset. Intended to be used between tests so that the behaviour of updates in a test is
-            independent from previous tests.
+        """Sets the update listener stream of the facade to a new stream so that updates will be
+        reset. Intended to be used between tests so that the behaviour of updates in a test is
+        independent from previous tests.
         """
         test_cmd = "wlan_ap_policy.set_new_update_listener"
 
         return self.send_command(test_cmd, {})
 
     def wlanGetUpdate(self, timeout=30):
-        """ Gets a list of AP state updates. This call will return with an update immediately the
-            first time the update listener is initialized by setting a new listener or by creating
-            a client controller before setting a new listener. Subsequent calls will hang until
-            there is an update.
-            Returns:
-                A list of AP state updated. If there is no error, the result is a list with a
-                structure that matches the FIDL AccessPointState struct given for updates.
+        """Gets a list of AP state updates. This call will return with an update immediately the
+        first time the update listener is initialized by setting a new listener or by creating
+        a client controller before setting a new listener. Subsequent calls will hang until
+        there is an update.
+        Returns:
+            A list of AP state updates. If there is no error, the result is a list with a
+            structure that matches the FIDL AccessPointState struct given for updates.
         """
         test_cmd = "wlan_ap_policy.get_update"
 
diff --git a/src/antlion/controllers/fuchsia_lib/wlan_deprecated_configuration_lib.py b/src/antlion/controllers/fuchsia_lib/wlan_deprecated_configuration_lib.py
index d229049..a53698b 100644
--- a/src/antlion/controllers/fuchsia_lib/wlan_deprecated_configuration_lib.py
+++ b/src/antlion/controllers/fuchsia_lib/wlan_deprecated_configuration_lib.py
@@ -19,18 +19,17 @@
 
 
 class FuchsiaWlanDeprecatedConfigurationLib(BaseLib):
-
     def __init__(self, addr: str) -> None:
         super().__init__(addr, "wlan_deprecated")
 
     def wlanSuggestAccessPointMacAddress(self, addr):
-        """ Suggests a mac address to soft AP interface, to support
+        """Suggests a mac address to soft AP interface, to support
         cast legacy behavior.
 
         Args:
             addr: string of mac address to suggest (e.g. '12:34:56:78:9a:bc')
         """
-        test_cmd = 'wlan_deprecated.suggest_ap_mac'
-        test_args = {'mac': addr}
+        test_cmd = "wlan_deprecated.suggest_ap_mac"
+        test_args = {"mac": addr}
 
         return self.send_command(test_cmd, test_args)
diff --git a/src/antlion/controllers/fuchsia_lib/wlan_lib.py b/src/antlion/controllers/fuchsia_lib/wlan_lib.py
index c68fa11..9ed274a 100644
--- a/src/antlion/controllers/fuchsia_lib/wlan_lib.py
+++ b/src/antlion/controllers/fuchsia_lib/wlan_lib.py
@@ -30,12 +30,11 @@
 
 
 class FuchsiaWlanLib(BaseLib):
-
     def __init__(self, addr: str) -> None:
         super().__init__(addr, "wlan")
 
     def wlanStartScan(self):
-        """ Starts a wlan scan
+        """Starts a wlan scan
 
         Returns:
             scan results
@@ -45,7 +44,7 @@
         return self.send_command(test_cmd, {})
 
     def wlanScanForBSSInfo(self):
-        """ Scans and returns BSS info
+        """Scans and returns BSS info
 
         Returns:
             A dict mapping each seen SSID to a list of BSS Description IE
@@ -55,11 +54,8 @@
 
         return self.send_command(test_cmd, {})
 
-    def wlanConnectToNetwork(self,
-                             target_ssid,
-                             target_bss_desc,
-                             target_pwd=None):
-        """ Triggers a network connection
+    def wlanConnectToNetwork(self, target_ssid, target_bss_desc, target_pwd=None):
+        """Triggers a network connection
         Args:
             target_ssid: the network to attempt a connection to
             target_pwd: (optional) password for the target network
@@ -71,19 +67,19 @@
         test_args = {
             "target_ssid": target_ssid,
             "target_pwd": target_pwd,
-            "target_bss_desc": target_bss_desc
+            "target_bss_desc": target_bss_desc,
         }
 
         return self.send_command(test_cmd, test_args)
 
     def wlanDisconnect(self):
-        """ Disconnect any current wifi connections"""
+        """Disconnect any current wifi connections"""
         test_cmd = COMMAND_DISCONNECT
 
         return self.send_command(test_cmd, {})
 
     def wlanDestroyIface(self, iface_id):
-        """ Destroy WLAN interface by ID.
+        """Destroy WLAN interface by ID.
         Args:
             iface_id: the interface id.
 
@@ -96,7 +92,7 @@
         return self.send_command(test_cmd, test_args)
 
     def wlanGetIfaceIdList(self):
-        """ Get a list if wlan interface IDs.
+        """Get a list of wlan interface IDs.
 
         Returns:
             Dictionary, service id if success, error if error.
@@ -106,7 +102,7 @@
         return self.send_command(test_cmd, {})
 
     def wlanPhyIdList(self):
-        """ Get a list if wlan phy IDs.
+        """Get a list of wlan phy IDs.
 
         Returns:
             List of IDs if success, error if error.
@@ -116,7 +112,7 @@
         return self.send_command(test_cmd, {})
 
     def wlanStatus(self, iface_id=None):
-        """ Request connection status
+        """Request connection status
 
         Args:
             iface_id: unsigned 16-bit int, the wlan interface id
@@ -129,12 +125,12 @@
         test_cmd = COMMAND_STATUS
         test_args = {}
         if iface_id:
-            test_args = {'iface_id': iface_id}
+            test_args = {"iface_id": iface_id}
 
         return self.send_command(test_cmd, test_args)
 
     def wlanGetCountry(self, phy_id):
-        """ Reads the currently configured country for `phy_id`.
+        """Reads the currently configured country for `phy_id`.
 
         Args:
             phy_id: unsigned 16-bit integer.
@@ -148,7 +144,7 @@
         return self.send_command(test_cmd, test_args)
 
     def wlanGetDevPath(self, phy_id):
-        """ Queries the device path for `phy_id`.
+        """Queries the device path for `phy_id`.
 
         Args:
             phy_id: unsigned 16-bit integer.
@@ -162,7 +158,7 @@
         return self.send_command(test_cmd, test_args)
 
     def wlanQueryInterface(self, iface_id):
-        """ Retrieves interface info for given wlan iface id.
+        """Retrieves interface info for given wlan iface id.
 
         Args:
             iface_id: unsigned 16-bit int, the wlan interface id.
@@ -172,6 +168,6 @@
             and mac addr.
         """
         test_cmd = COMMAND_QUERY_IFACE
-        test_args = {'iface_id': iface_id}
+        test_args = {"iface_id": iface_id}
 
         return self.send_command(test_cmd, test_args)
diff --git a/src/antlion/controllers/fuchsia_lib/wlan_policy_lib.py b/src/antlion/controllers/fuchsia_lib/wlan_policy_lib.py
index 37e70fa..94701d7 100644
--- a/src/antlion/controllers/fuchsia_lib/wlan_policy_lib.py
+++ b/src/antlion/controllers/fuchsia_lib/wlan_policy_lib.py
@@ -35,79 +35,78 @@
 
 
 class FuchsiaWlanPolicyLib(BaseLib):
-
     def __init__(self, addr: str) -> None:
         super().__init__(addr, "wlan_policy")
 
     def wlanStartClientConnections(self):
-        """ Enables device to initiate connections to networks """
+        """Enables device to initiate connections to networks"""
 
         test_cmd = COMMAND_START_CLIENT_CONNECTIONS
 
         return self.send_command(test_cmd, {})
 
     def wlanStopClientConnections(self):
-        """ Disables device for initiating connections to networks """
+        """Disables device for initiating connections to networks"""
 
         test_cmd = COMMAND_STOP_CLIENT_CONNECTIONS
 
         return self.send_command(test_cmd, {})
 
     def wlanScanForNetworks(self):
-        """ Scans for networks that can be connected to
-                Returns:
-                    A list of network names and security types
-         """
+        """Scans for networks that can be connected to
+        Returns:
+            A list of network names and security types
+        """
 
         test_cmd = COMMAND_SCAN_FOR_NETWORKS
 
         return self.send_command(test_cmd, {})
 
     def wlanSaveNetwork(self, target_ssid, security_type, target_pwd=None):
-        """ Saveds a network to the device for future connections
-                Args:
-                    target_ssid: the network to attempt a connection to
-                    security_type: the security protocol of the network
-                    target_pwd: (optional) credential being saved with the network. No password
-                                is equivalent to empty string.
+        """Saves a network to the device for future connections
+        Args:
+            target_ssid: the network to attempt a connection to
+            security_type: the security protocol of the network
+            target_pwd: (optional) credential being saved with the network. No password
+                        is equivalent to empty string.
 
-                Returns:
-                    boolean indicating if the connection was successful
+        Returns:
+            boolean indicating if the network was saved successfully
         """
         if not target_pwd:
-            target_pwd = ''
+            target_pwd = ""
         test_cmd = COMMAND_SAVE_NETWORK
         test_args = {
             "target_ssid": target_ssid,
             "security_type": str(security_type).lower(),
-            "target_pwd": target_pwd
+            "target_pwd": target_pwd,
         }
 
         return self.send_command(test_cmd, test_args)
 
     def wlanRemoveNetwork(self, target_ssid, security_type, target_pwd=None):
-        """ Removes or "forgets" a network from saved networks
-                Args:
-                    target_ssid: the network to attempt a connection to
-                    security_type: the security protocol of the network
-                    target_pwd: (optional) credential of the network to remove. No password and
-                                empty string are equivalent.
+        """Removes or "forgets" a network from saved networks
+        Args:
+            target_ssid: the network to attempt a connection to
+            security_type: the security protocol of the network
+            target_pwd: (optional) credential of the network to remove. No password and
+                        empty string are equivalent.
         """
         if not target_pwd:
-            target_pwd = ''
+            target_pwd = ""
         test_cmd = COMMAND_REMOVE_NETWORK
         test_args = {
             "target_ssid": target_ssid,
             "security_type": str(security_type).lower(),
-            "target_pwd": target_pwd
+            "target_pwd": target_pwd,
         }
 
         return self.send_command(test_cmd, test_args)
 
     def wlanRemoveAllNetworks(self):
-        """ Removes or "forgets" all networks from saved networks
-                Returns:
-                    A boolean indicating if the action was successful
+        """Removes or "forgets" all networks from saved networks
+        Returns:
+            A boolean indicating if the action was successful
         """
 
         test_cmd = COMMAND_REMOVE_ALL_NETWORKS
@@ -115,10 +114,10 @@
         return self.send_command(test_cmd, {})
 
     def wlanGetSavedNetworks(self):
-        """ Gets networks saved on device. Any PSK of a saved network will be
-            lower case regardless of how it was saved.
-                Returns:
-                    A list of saved network names and security protocols
+        """Gets networks saved on device. Any PSK of a saved network will be
+        lower case regardless of how it was saved.
+            Returns:
+                A list of saved network names and security protocols
         """
 
         test_cmd = COMMAND_GET_SAVED_NETWORKS
@@ -126,57 +125,57 @@
         return self.send_command(test_cmd, {})
 
     def wlanConnect(self, target_ssid, security_type):
-        """ Triggers connection to a network
-                Args:
-                    target_ssid: the network to attempt a connection to. Must have been previously
-                                 saved in order for a successful connection to happen.
-                    security_type: the security protocol of the network
+        """Triggers connection to a network
+            Args:
+                target_ssid: the network to attempt a connection to. Must have been previously
+                             saved in order for a successful connection to happen.
+                security_type: the security protocol of the network
 
-            Returns:
-                    boolean indicating if the connection was successful
+        Returns:
+                boolean indicating if the connection was successful
         """
 
         test_cmd = COMMAND_CONNECT
         test_args = {
             "target_ssid": target_ssid,
-            "security_type": str(security_type).lower()
+            "security_type": str(security_type).lower(),
         }
 
         return self.send_command(test_cmd, test_args)
 
     def wlanCreateClientController(self):
-        """ Initializes the client controller of the facade that is used to make Client Controller
-            API calls
+        """Initializes the client controller of the facade that is used to make Client Controller
+        API calls
         """
         test_cmd = COMMAND_CREATE_CLIENT_CONTROLLER
 
         return self.send_command(test_cmd, {})
 
     def wlanSetNewListener(self):
-        """ Sets the update listener stream of the facade to a new stream so that updates will be
-            reset. Intended to be used between tests so that the behaviour of updates in a test is
-            independent from previous tests.
+        """Sets the update listener stream of the facade to a new stream so that updates will be
+        reset. Intended to be used between tests so that the behaviour of updates in a test is
+        independent from previous tests.
         """
         test_cmd = COMMAND_SET_NEW_LISTENER
 
         return self.send_command(test_cmd, {})
 
     def wlanRemoveAllNetworks(self):
-        """ Deletes all saved networks on the device. Relies directly on the get_saved_networks and
-            remove_network commands
+        """Deletes all saved networks on the device. Relies directly on the get_saved_networks and
+        remove_network commands
         """
         test_cmd = COMMAND_REMOVE_ALL_NETWORKS
 
         return self.send_command(test_cmd, {})
 
     def wlanGetUpdate(self, timeout=30):
-        """ Gets one client listener update. This call will return with an update immediately the
-            first time the update listener is initialized by setting a new listener or by creating
-            a client controller before setting a new listener. Subsequent calls will hang until
-            there is an update.
-            Returns:
-                An update of connection status. If there is no error, the result is a dict with a
-                structure that matches the FIDL ClientStateSummary struct given for updates.
+        """Gets one client listener update. This call will return with an update immediately the
+        first time the update listener is initialized by setting a new listener or by creating
+        a client controller before setting a new listener. Subsequent calls will hang until
+        there is an update.
+        Returns:
+            An update of connection status. If there is no error, the result is a dict with a
+            structure that matches the FIDL ClientStateSummary struct given for updates.
         """
         test_cmd = COMMAND_GET_UPDATE
 
diff --git a/src/antlion/controllers/gnss_lib/GnssSimulator.py b/src/antlion/controllers/gnss_lib/GnssSimulator.py
deleted file mode 100644
index 69cad04..0000000
--- a/src/antlion/controllers/gnss_lib/GnssSimulator.py
+++ /dev/null
@@ -1,200 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Python module for General abstract GNSS Simulator.
-@author: Clay Liao (jianhsiungliao@)
-"""
-from time import sleep
-from antlion.controllers.spectracom_lib import gsg6
-from antlion.controllers.spirent_lib import gss7000
-from antlion import logger
-from antlion.utils import ping
-from antlion.libs.proc import job
-
-
-class AbstractGnssSimulator:
-    """General abstract GNSS Simulator"""
-
-    def __init__(self, simulator, ip_addr, ip_port, ip_port_ctrl=7717):
-        """Init AbstractGnssSimulator
-
-        Args:
-            simulator: GNSS simulator name,
-                Type, str
-                Option 'gss7000/gsg6'
-            ip_addr: IP Address.
-                Type, str
-            ip_port: TCPIP Port,
-                Type, str
-            ip_port_ctrl: TCPIP port,
-                Type, int
-                Default, 7717
-        """
-        self.simulator_name = str(simulator).lower()
-        self.ip_addr = ip_addr
-        self.ip_port = ip_port
-        self.ip_port_ctrl = ip_port_ctrl
-        self._logger = logger.create_tagged_trace_logger(
-            '%s %s:%s' % (simulator, self.ip_addr, self.ip_port))
-        if self.simulator_name == 'gsg6':
-            self._logger.info('GNSS simulator is GSG6')
-            self.simulator = gsg6.GSG6(self.ip_addr, self.ip_port)
-        elif self.simulator_name == 'gss7000':
-            self._logger.info('GNSS simulator is GSS7000')
-            self.simulator = gss7000.GSS7000(self.ip_addr, self.ip_port,
-                                             self.ip_port_ctrl)
-        else:
-            self._logger.error('No matched GNSS simulator')
-            raise AttributeError(
-                'The GNSS simulator in config file is {} which is not supported.'
-                .format(self.simulator_name))
-
-    def connect(self):
-        """Connect to GNSS Simulator"""
-        self._logger.debug('Connect to GNSS Simulator {}'.format(
-            self.simulator_name.upper()))
-        self.simulator.connect()
-
-    def close(self):
-        """Disconnect from GNSS Simulator"""
-        self._logger.debug('Disconnect from GNSS Simulator {}'.format(
-            self.simulator_name.upper()))
-        self.simulator.close()
-
-    def start_scenario(self, scenario=''):
-        """Start the running scenario.
-
-        Args:
-            scenario: path of scenario,
-                Type, str
-        """
-        self._logger.info('Start GNSS Scenario {}'.format(scenario))
-        self.simulator.start_scenario(scenario)
-
-    def stop_scenario(self):
-        """Stop the running scenario."""
-        self._logger.debug('Stop playing scenario')
-        self.simulator.stop_scenario()
-
-    def set_power(self, power_level=-130):
-        """Set scenario power level.
-        Args:
-            power_level: target power level in dBm for gsg6 or gss7000,
-                gsg6 power_level range is [-160, -65],
-                gss7000 power_level range is [-170, -115]
-                Type, float,
-        """
-        self.simulator.set_power(power_level)
-
-    def set_power_offset(self, gss7000_ant=1, pwr_offset=0):
-        """Set scenario power level offset based on reference level.
-           The default reference level is -130dBm for GPS L1.
-        Args:
-            ant: target gss7000 RF port,
-                Type, int
-            pwr_offset: target power offset in dB,
-                Type, float
-        """
-        if self.simulator_name == 'gsg6':
-            power_level = -130 + pwr_offset
-            self.simulator.set_power(power_level)
-        elif self.simulator_name == 'gss7000':
-            self.simulator.set_power_offset(gss7000_ant, pwr_offset)
-        else:
-            self._logger.error('No GNSS simulator is available')
-
-    def set_scenario_power(self,
-                           power_level,
-                           sat_id='',
-                           sat_system='',
-                           freq_band=''):
-        """Set dynamic power for the running scenario.
-
-        Args:
-            power_level: transmit power level
-                Type, float.
-                Decimal, unit [dBm]
-            sat_id: set power level for specific satellite identifiers
-                Type, str.
-                Option
-                    For GSG-6: 'Gxx/Rxx/Exx/Cxx/Jxx/Ixx/Sxxx'
-                    where xx is satellite identifiers number
-                    e.g.: G10
-                    For GSS7000: Provide SVID.
-                Default, '', assumed All.
-            sat_system: to set power level for all Satellites
-                Type, str
-                Option [GPS, GLO, GAL, BDS, QZSS, IRNSS, SBAS]
-                Default, '', assumed All.
-            freq_band: Frequency band to set the power level
-                Type, str
-                Default, '', assumed to be L1.
-         Raises:
-            RuntimeError: raise when instrument does not support this function.
-        """
-        self.simulator.set_scenario_power(power_level=power_level,
-                                          sat_id=sat_id,
-                                          sat_system=sat_system,
-                                          freq_band=freq_band)
-
-    def toggle_scenario_power(self,
-                              toggle_onoff='ON',
-                              sat_id='',
-                              sat_system=''):
-        """Toggle ON OFF scenario.
-
-        Args:
-            toggle_onoff: turn on or off the satellites
-                Type, str. Option ON/OFF
-                Default, 'ON'
-            sat_id: satellite identifiers
-                Type, str.
-                Option 'Gxx/Rxx/Exx/Cxx/Jxx/Ixx/Sxxx'
-                where xx is satellite identifiers no.
-                e.g.: G10
-            sat_system: to toggle On/OFF for all Satellites
-                Type, str
-                Option 'GPS/GLO/GAL'
-        """
-        # TODO: [b/208719212] Currently only support GSG-6. Will implement GSS7000 feature.
-        if self.simulator_name == 'gsg6':
-            self.simulator.toggle_scenario_power(toggle_onoff=toggle_onoff,
-                                                 sat_id=sat_id,
-                                                 sat_system=sat_system)
-        else:
-            raise RuntimeError('{} does not support this function'.format(
-                self.simulator_name))
-
-    def ping_inst(self, retry=3, wait=1):
-        """Ping IP of instrument to check if the connection is stable.
-        Args:
-            retry: Retry times.
-                Type, int.
-                Default, 3.
-            wait: Wait time between each ping command when ping fail is met.
-                Type, int.
-                Default, 1.
-        Return:
-            True/False of ping result.
-        """
-        for i in range(retry):
-            ret = ping(job, self.ip_addr)
-            self._logger.debug(f'Ping return results: {ret}')
-            if ret.get('packet_loss') == '0':
-                return True
-            self._logger.warning(f'Fail to ping GNSS Simulator: {i+1}')
-            sleep(wait)
-        return False
diff --git a/src/antlion/controllers/gnss_lib/__init__.py b/src/antlion/controllers/gnss_lib/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/controllers/gnss_lib/__init__.py
+++ /dev/null
diff --git a/src/antlion/controllers/iperf_client.py b/src/antlion/controllers/iperf_client.py
index c4d8e1d..9ad6efc 100644
--- a/src/antlion/controllers/iperf_client.py
+++ b/src/antlion/controllers/iperf_client.py
@@ -30,8 +30,8 @@
 from antlion.controllers.utils_lib.ssh import settings
 from antlion.libs.proc import job
 
-MOBLY_CONTROLLER_CONFIG_NAME = 'IPerfClient'
-ACTS_CONTROLLER_REFERENCE_NAME = 'iperf_clients'
+MOBLY_CONTROLLER_CONFIG_NAME = "IPerfClient"
+ACTS_CONTROLLER_REFERENCE_NAME = "iperf_clients"
 
 
 class IPerfError(Exception):
@@ -51,14 +51,18 @@
     """
     results = []
     for c in configs:
-        if type(c) is dict and 'AndroidDevice' in c:
+        if type(c) is dict and "AndroidDevice" in c:
             results.append(
-                IPerfClientOverAdb(c['AndroidDevice'],
-                                   test_interface=c.get('test_interface')))
-        elif type(c) is dict and 'ssh_config' in c:
+                IPerfClientOverAdb(
+                    c["AndroidDevice"], test_interface=c.get("test_interface")
+                )
+            )
+        elif type(c) is dict and "ssh_config" in c:
             results.append(
-                IPerfClientOverSsh(c['ssh_config'],
-                                   test_interface=c.get('test_interface')))
+                IPerfClientOverSsh(
+                    c["ssh_config"], test_interface=c.get("test_interface")
+                )
+            )
         else:
             results.append(IPerfClient())
     return results
@@ -85,6 +89,7 @@
     multiple IPerfClients from writing results to the same file, as well
     as providing the interface for IPerfClient objects.
     """
+
     # Keeps track of the number of IPerfClient logs to prevent file name
     # collisions.
     __log_file_counter = 0
@@ -92,7 +97,7 @@
     __log_file_lock = threading.Lock()
 
     @staticmethod
-    def _get_full_file_path(tag=''):
+    def _get_full_file_path(tag=""):
         """Returns the full file path for the IPerfClient log file.
 
         Note: If the directory for the file path does not exist, it will be
@@ -102,14 +107,16 @@
             tag: The tag passed in to the server run.
         """
         current_context = context.get_current_context()
-        full_out_dir = os.path.join(current_context.get_full_output_path(),
-                                    'iperf_client_files')
+        full_out_dir = os.path.join(
+            current_context.get_full_output_path(), "iperf_client_files"
+        )
 
         with IPerfClientBase.__log_file_lock:
             os.makedirs(full_out_dir, exist_ok=True)
-            tags = ['IPerfClient', tag, IPerfClientBase.__log_file_counter]
-            out_file_name = '%s.log' % (','.join(
-                [str(x) for x in tags if x != '' and x is not None]))
+            tags = ["IPerfClient", tag, IPerfClientBase.__log_file_counter]
+            out_file_name = "%s.log" % (
+                ",".join([str(x) for x in tags if x != "" and x is not None])
+            )
             IPerfClientBase.__log_file_counter += 1
 
         return os.path.join(full_out_dir, out_file_name)
@@ -129,7 +136,7 @@
         Returns:
             full_out_path: iperf result path.
         """
-        raise NotImplementedError('start() must be implemented.')
+        raise NotImplementedError("start() must be implemented.")
 
 
 class IPerfClient(IPerfClientBase):
@@ -151,15 +158,16 @@
             full_out_path: iperf result path.
         """
         if not iperf_binary:
-            logging.debug('No iperf3 binary specified.  '
-                          'Assuming iperf3 is in the path.')
-            iperf_binary = 'iperf3'
+            logging.debug(
+                "No iperf3 binary specified.  " "Assuming iperf3 is in the path."
+            )
+            iperf_binary = "iperf3"
         else:
-            logging.debug('Using iperf3 binary located at %s' % iperf_binary)
-        iperf_cmd = [str(iperf_binary), '-c', ip] + iperf_args.split(' ')
+            logging.debug("Using iperf3 binary located at %s" % iperf_binary)
+        iperf_cmd = [str(iperf_binary), "-c", ip] + iperf_args.split(" ")
         full_out_path = self._get_full_file_path(tag)
 
-        with open(full_out_path, 'w') as out_file:
+        with open(full_out_path, "w") as out_file:
             subprocess.call(iperf_cmd, stdout=out_file)
 
         return full_out_path
@@ -168,17 +176,22 @@
 class IPerfClientOverSsh(IPerfClientBase):
     """Class that handles iperf3 client operations on remote machines."""
 
-    def __init__(self,
-                 ssh_config: str,
-                 test_interface: str = None,
-                 ssh_provider: SSHProvider = None):
+    def __init__(
+        self,
+        ssh_config: str,
+        test_interface: str = None,
+        ssh_provider: SSHProvider = None,
+    ):
         self._ssh_provider = ssh_provider
         if not self._ssh_provider:
             self._ssh_settings = settings.from_config(ssh_config)
-            if not (utils.is_valid_ipv4_address(self._ssh_settings.hostname) or
-                    utils.is_valid_ipv6_address(self._ssh_settings.hostname)):
+            if not (
+                utils.is_valid_ipv4_address(self._ssh_settings.hostname)
+                or utils.is_valid_ipv6_address(self._ssh_settings.hostname)
+            ):
                 mdns_ip = utils.get_fuchsia_mdns_ipv6_address(
-                    self._ssh_settings.hostname)
+                    self._ssh_settings.hostname
+                )
                 if mdns_ip:
                     self._ssh_settings.hostname = mdns_ip
         self._ssh_session = None
@@ -202,30 +215,30 @@
             full_out_path: iperf result path.
         """
         if not iperf_binary:
-            logging.debug('No iperf3 binary specified.  '
-                          'Assuming iperf3 is in the path.')
-            iperf_binary = 'iperf3'
+            logging.debug(
+                "No iperf3 binary specified.  " "Assuming iperf3 is in the path."
+            )
+            iperf_binary = "iperf3"
         else:
-            logging.debug('Using iperf3 binary located at %s' % iperf_binary)
-        iperf_cmd = '{} -c {} {}'.format(iperf_binary, ip, iperf_args)
+            logging.debug("Using iperf3 binary located at %s" % iperf_binary)
+        iperf_cmd = "{} -c {} {}".format(iperf_binary, ip, iperf_args)
         full_out_path = self._get_full_file_path(tag)
 
         try:
             self.start_ssh()
             if self._ssh_provider:
-                iperf_process = self._ssh_provider.run(iperf_cmd,
-                                                       timeout_sec=timeout)
+                iperf_process = self._ssh_provider.run(iperf_cmd, timeout_sec=timeout)
             else:
-                iperf_process = self._ssh_session.run(iperf_cmd,
-                                                      timeout=timeout)
+                iperf_process = self._ssh_session.run(iperf_cmd, timeout=timeout)
             iperf_output = iperf_process.stdout
-            with open(full_out_path, 'w') as out_file:
+            with open(full_out_path, "w") as out_file:
                 out_file.write(iperf_output)
         except socket.timeout:
-            raise TimeoutError('Socket timeout. Timed out waiting for iperf '
-                               'client to finish.')
+            raise TimeoutError(
+                "Socket timeout. Timed out waiting for iperf " "client to finish."
+            )
         except Exception as err:
-            logging.exception('iperf run failed: {}'.format(err))
+            logging.exception("iperf run failed: {}".format(err))
 
         return full_out_path
 
@@ -269,7 +282,8 @@
             return self._android_device_or_serial
         else:
             return _AndroidDeviceBridge.android_devices()[
-                self._android_device_or_serial]
+                self._android_device_or_serial
+            ]
 
     def start(self, ip, iperf_args, tag, timeout=3600, iperf_binary=None):
         """Starts iperf client, and waits for completion.
@@ -286,26 +300,25 @@
         Returns:
             The iperf result file path.
         """
-        clean_out = ''
+        clean_out = ""
         try:
             if not iperf_binary:
-                logging.debug('No iperf3 binary specified.  '
-                              'Assuming iperf3 is in the path.')
-                iperf_binary = 'iperf3'
+                logging.debug(
+                    "No iperf3 binary specified.  " "Assuming iperf3 is in the path."
+                )
+                iperf_binary = "iperf3"
             else:
-                logging.debug('Using iperf3 binary located at %s' %
-                              iperf_binary)
-            iperf_cmd = '{} -c {} {}'.format(iperf_binary, ip, iperf_args)
-            out = self._android_device.adb.shell(str(iperf_cmd),
-                                                 timeout=timeout)
-            clean_out = out.split('\n')
-            if 'error' in clean_out[0].lower():
+                logging.debug("Using iperf3 binary located at %s" % iperf_binary)
+            iperf_cmd = "{} -c {} {}".format(iperf_binary, ip, iperf_args)
+            out = self._android_device.adb.shell(str(iperf_cmd), timeout=timeout)
+            clean_out = out.split("\n")
+            if "error" in clean_out[0].lower():
                 raise IPerfError(clean_out)
         except (job.TimeoutError, AdbCommandError):
-            logging.warning('TimeoutError: Iperf measurement failed.')
+            logging.warning("TimeoutError: Iperf measurement failed.")
 
         full_out_path = self._get_full_file_path(tag)
-        with open(full_out_path, 'w') as out_file:
-            out_file.write('\n'.join(clean_out))
+        with open(full_out_path, "w") as out_file:
+            out_file.write("\n".join(clean_out))
 
         return full_out_path
diff --git a/src/antlion/controllers/iperf_server.py b/src/antlion/controllers/iperf_server.py
index b1311ff..20dcfbf 100755
--- a/src/antlion/controllers/iperf_server.py
+++ b/src/antlion/controllers/iperf_server.py
@@ -35,8 +35,8 @@
 from antlion.event.event import TestClassEndEvent
 from antlion.libs.proc import job
 
-MOBLY_CONTROLLER_CONFIG_NAME = 'IPerfServer'
-ACTS_CONTROLLER_REFERENCE_NAME = 'iperf_servers'
+MOBLY_CONTROLLER_CONFIG_NAME = "IPerfServer"
+ACTS_CONTROLLER_REFERENCE_NAME = "iperf_servers"
 KILOBITS = 1024
 MEGABITS = KILOBITS * 1024
 GIGABITS = MEGABITS * 1024
@@ -44,7 +44,7 @@
 
 
 def create(configs):
-    """ Factory method for iperf servers.
+    """Factory method for iperf servers.
 
     The function creates iperf servers based on at least one config.
     If configs only specify a port number, a regular local IPerfServer object
@@ -58,18 +58,22 @@
     for c in configs:
         if type(c) in (str, int) and str(c).isdigit():
             results.append(IPerfServer(int(c)))
-        elif type(c) is dict and 'AndroidDevice' in c and 'port' in c:
-            results.append(IPerfServerOverAdb(c['AndroidDevice'], c['port']))
-        elif type(c) is dict and 'ssh_config' in c and 'port' in c:
+        elif type(c) is dict and "AndroidDevice" in c and "port" in c:
+            results.append(IPerfServerOverAdb(c["AndroidDevice"], c["port"]))
+        elif type(c) is dict and "ssh_config" in c and "port" in c:
             results.append(
-                IPerfServerOverSsh(c['ssh_config'],
-                                   c['port'],
-                                   test_interface=c.get('test_interface'),
-                                   use_killall=c.get('use_killall')))
+                IPerfServerOverSsh(
+                    settings.from_config(c["ssh_config"]),
+                    c["port"],
+                    test_interface=c.get("test_interface"),
+                    use_killall=c.get("use_killall"),
+                )
+            )
         else:
             raise ValueError(
-                'Config entry %s in %s is not a valid IPerfServer '
-                'config.' % (repr(c), configs))
+                "Config entry %s in %s is not a valid IPerfServer "
+                "config." % (repr(c), configs)
+            )
     return results
 
 
@@ -87,12 +91,11 @@
         try:
             iperf_server.stop()
         except Exception:
-            logging.exception('Unable to properly clean up %s.' % iperf_server)
+            logging.exception("Unable to properly clean up %s." % iperf_server)
 
 
 class IPerfResult(object):
-
-    def __init__(self, result_path, reporting_speed_units='Mbytes'):
+    def __init__(self, result_path, reporting_speed_units="Mbytes"):
         """Loads iperf result from file.
 
         Loads iperf result from JSON formatted server log. File can be accessed
@@ -106,20 +109,19 @@
             self.result = json.loads(result_path)
         else:
             try:
-                with open(result_path, 'r') as f:
+                with open(result_path, "r") as f:
                     iperf_output = f.readlines()
-                    if '}\n' in iperf_output:
-                        iperf_output = iperf_output[:iperf_output.index('}\n'
-                                                                        ) + 1]
-                    iperf_string = ''.join(iperf_output)
-                    iperf_string = iperf_string.replace('nan', '0')
+                    if "}\n" in iperf_output:
+                        iperf_output = iperf_output[: iperf_output.index("}\n") + 1]
+                    iperf_string = "".join(iperf_output)
+                    iperf_string = iperf_string.replace("nan", "0")
                     self.result = json.loads(iperf_string)
             except ValueError:
-                with open(result_path, 'r') as f:
+                with open(result_path, "r") as f:
                     # Possibly a result from interrupted iperf run,
                     # skip first line and try again.
                     lines = f.readlines()[1:]
-                    self.result = json.loads(''.join(lines))
+                    self.result = json.loads("".join(lines))
 
     def _has_data(self):
         """Checks if the iperf result has valid throughput data.
@@ -127,8 +129,9 @@
         Returns:
             True if the result contains throughput data. False otherwise.
         """
-        return ('end' in self.result) and ('sum_received' in self.result['end']
-                                           or 'sum' in self.result['end'])
+        return ("end" in self.result) and (
+            "sum_received" in self.result["end"] or "sum" in self.result["end"]
+        )
 
     def _get_reporting_speed(self, network_speed_in_bits_per_second):
         """Sets the units for the network speed reporting based on how the
@@ -146,13 +149,13 @@
             The value of the throughput in the appropriate units.
         """
         speed_divisor = 1
-        if self.reporting_speed_units[1:].lower() == 'bytes':
+        if self.reporting_speed_units[1:].lower() == "bytes":
             speed_divisor = speed_divisor * BITS_IN_BYTE
-        if self.reporting_speed_units[0:1].lower() == 'k':
+        if self.reporting_speed_units[0:1].lower() == "k":
             speed_divisor = speed_divisor * KILOBITS
-        if self.reporting_speed_units[0:1].lower() == 'm':
+        if self.reporting_speed_units[0:1].lower() == "m":
             speed_divisor = speed_divisor * MEGABITS
-        if self.reporting_speed_units[0:1].lower() == 'g':
+        if self.reporting_speed_units[0:1].lower() == "g":
             speed_divisor = speed_divisor * GIGABITS
         return network_speed_in_bits_per_second / speed_divisor
 
@@ -162,7 +165,7 @@
 
     @property
     def error(self):
-        return self.result.get('error', None)
+        return self.result.get("error", None)
 
     @property
     def avg_rate(self):
@@ -174,9 +177,9 @@
         quality of the link. If the result is not from a success run, this
         property is None.
         """
-        if not self._has_data() or 'sum' not in self.result['end']:
+        if not self._has_data() or "sum" not in self.result["end"]:
             return None
-        bps = self.result['end']['sum']['bits_per_second']
+        bps = self.result["end"]["sum"]["bits_per_second"]
         return self._get_reporting_speed(bps)
 
     @property
@@ -186,9 +189,9 @@
         This data may not exist if iperf was interrupted. If the result is not
         from a success run, this property is None.
         """
-        if not self._has_data() or 'sum_received' not in self.result['end']:
+        if not self._has_data() or "sum_received" not in self.result["end"]:
             return None
-        bps = self.result['end']['sum_received']['bits_per_second']
+        bps = self.result["end"]["sum_received"]["bits_per_second"]
         return self._get_reporting_speed(bps)
 
     @property
@@ -198,9 +201,9 @@
         This data may not exist if iperf was interrupted. If the result is not
         from a success run, this property is None.
         """
-        if not self._has_data() or 'sum_sent' not in self.result['end']:
+        if not self._has_data() or "sum_sent" not in self.result["end"]:
             return None
-        bps = self.result['end']['sum_sent']['bits_per_second']
+        bps = self.result["end"]["sum_sent"]["bits_per_second"]
         return self._get_reporting_speed(bps)
 
     @property
@@ -213,8 +216,8 @@
         if not self._has_data():
             return None
         intervals = [
-            self._get_reporting_speed(interval['sum']['bits_per_second'])
-            for interval in self.result['intervals']
+            self._get_reporting_speed(interval["sum"]["bits_per_second"])
+            for interval in self.result["intervals"]
         ]
         return intervals
 
@@ -245,13 +248,10 @@
         """
         if not self._has_data():
             return None
-        instantaneous_rates = self.instantaneous_rates[
-            iperf_ignored_interval:-1]
+        instantaneous_rates = self.instantaneous_rates[iperf_ignored_interval:-1]
         avg_rate = math.fsum(instantaneous_rates) / len(instantaneous_rates)
-        sqd_deviations = ([(rate - avg_rate)**2
-                           for rate in instantaneous_rates])
-        std_dev = math.sqrt(
-            math.fsum(sqd_deviations) / (len(sqd_deviations) - 1))
+        sqd_deviations = [(rate - avg_rate) ** 2 for rate in instantaneous_rates]
+        std_dev = math.sqrt(math.fsum(sqd_deviations) / (len(sqd_deviations) - 1))
         return std_dev
 
 
@@ -272,13 +272,13 @@
 
     @property
     def port(self):
-        raise NotImplementedError('port must be specified.')
+        raise NotImplementedError("port must be specified.")
 
     @property
     def started(self):
-        raise NotImplementedError('started must be specified.')
+        raise NotImplementedError("started must be specified.")
 
-    def start(self, extra_args='', tag=''):
+    def start(self, extra_args="", tag=""):
         """Starts an iperf3 server.
 
         Args:
@@ -287,7 +287,7 @@
             tag: Appended to log file name to identify logs from different
                 iperf runs.
         """
-        raise NotImplementedError('start() must be specified.')
+        raise NotImplementedError("start() must be specified.")
 
     def stop(self):
         """Stops the iperf server.
@@ -295,7 +295,7 @@
         Returns:
             The name of the log file generated from the terminated session.
         """
-        raise NotImplementedError('stop() must be specified.')
+        raise NotImplementedError("stop() must be specified.")
 
     def _get_full_file_path(self, tag=None):
         """Returns the full file path for the IPerfServer log file.
@@ -310,8 +310,9 @@
 
         with IPerfServerBase.__log_file_lock:
             tags = [tag, IPerfServerBase.__log_file_counter]
-            out_file_name = 'IPerfServer,%s.log' % (','.join(
-                [str(x) for x in tags if x != '' and x is not None]))
+            out_file_name = "IPerfServer,%s.log" % (
+                ",".join([str(x) for x in tags if x != "" and x is not None])
+            )
             IPerfServerBase.__log_file_counter += 1
 
         file_path = os.path.join(out_dir, out_file_name)
@@ -321,8 +322,9 @@
     @property
     def log_path(self):
         current_context = context.get_current_context()
-        full_out_dir = os.path.join(current_context.get_full_output_path(),
-                                    'IPerfServer%s' % self.port)
+        full_out_dir = os.path.join(
+            current_context.get_full_output_path(), "IPerfServer%s" % self.port
+        )
 
         # Ensure the directory exists.
         os.makedirs(full_out_dir, exist_ok=True)
@@ -332,14 +334,14 @@
 
 def _get_port_from_ss_output(ss_output, pid):
     pid = str(pid)
-    lines = ss_output.split('\n')
+    lines = ss_output.split("\n")
     for line in lines:
         if pid in line:
             # Expected format:
             # tcp LISTEN  0 5 *:<PORT>  *:* users:(("cmd",pid=<PID>,fd=3))
-            return line.split()[4].split(':')[-1]
+            return line.split()[4].split(":")[-1]
     else:
-        raise ProcessLookupError('Could not find started iperf3 process.')
+        raise ProcessLookupError("Could not find started iperf3 process.")
 
 
 class IPerfServer(IPerfServerBase):
@@ -360,7 +362,7 @@
     def started(self):
         return self._iperf_process is not None
 
-    def start(self, extra_args='', tag=''):
+    def start(self, extra_args="", tag=""):
         """Starts iperf server on local machine.
 
         Args:
@@ -375,28 +377,30 @@
         self._current_log_file = self._get_full_file_path(tag)
 
         # Run an iperf3 server on the hinted port with JSON output.
-        command = ['iperf3', '-s', '-p', str(self._hinted_port), '-J']
+        command = ["iperf3", "-s", "-p", str(self._hinted_port), "-J"]
 
         command.extend(shlex.split(extra_args))
 
         if self._last_opened_file:
             self._last_opened_file.close()
-        self._last_opened_file = open(self._current_log_file, 'w')
-        self._iperf_process = subprocess.Popen(command,
-                                               stdout=self._last_opened_file,
-                                               stderr=subprocess.DEVNULL)
+        self._last_opened_file = open(self._current_log_file, "w")
+        self._iperf_process = subprocess.Popen(
+            command, stdout=self._last_opened_file, stderr=subprocess.DEVNULL
+        )
         for attempts_left in reversed(range(3)):
             try:
                 self._port = int(
                     _get_port_from_ss_output(
-                        job.run('ss -l -p -n | grep iperf').stdout,
-                        self._iperf_process.pid))
+                        job.run("ss -l -p -n | grep iperf").stdout,
+                        self._iperf_process.pid,
+                    )
+                )
                 break
             except ProcessLookupError:
                 if attempts_left == 0:
                     raise
-                logging.debug('iperf3 process not started yet.')
-                time.sleep(.01)
+                logging.debug("iperf3 process not started yet.")
+                time.sleep(0.01)
 
     def stop(self):
         """Stops the iperf server.
@@ -423,28 +427,28 @@
 class IPerfServerOverSsh(IPerfServerBase):
     """Class that handles iperf3 operations on remote machines."""
 
-    def __init__(self,
-                 ssh_config,
-                 port,
-                 test_interface=None,
-                 use_killall=False):
+    def __init__(self, ssh_settings, port, test_interface=None, use_killall=False):
         super().__init__(port)
-        self.ssh_settings = settings.from_config(ssh_config)
+        self.ssh_settings = ssh_settings
         self.log = acts_logger.create_tagged_trace_logger(
-            f'IPerfServer | {self.ssh_settings.hostname}')
+            f"IPerfServer | {self.ssh_settings.hostname}"
+        )
         self._ssh_session = None
         self.start_ssh()
 
         self._iperf_pid = None
         self._current_tag = None
         self.hostname = self.ssh_settings.hostname
-        self._use_killall = str(use_killall).lower() == 'true'
+        self._use_killall = str(use_killall).lower() == "true"
         try:
             # A test interface can only be found if an ip address is specified.
             # A fully qualified hostname will return None for the
             # test_interface.
-            self.test_interface = test_interface if test_interface else utils.get_interface_based_on_ip(
-                self._ssh_session, self.hostname)
+            self.test_interface = (
+                test_interface
+                if test_interface
+                else utils.get_interface_based_on_ip(self._ssh_session, self.hostname)
+            )
         except Exception as e:
             self.log.warning(e)
             self.test_interface = None
@@ -458,7 +462,7 @@
         return self._iperf_pid is not None
 
     def _get_remote_log_path(self):
-        return '/tmp/iperf_server_port%s.log' % self.port
+        return "/tmp/iperf_server_port%s.log" % self.port
 
     def get_interface_ip_addresses(self, interface):
         """Gets all of the ip addresses, ipv4 and ipv6, associated with a
@@ -485,28 +489,29 @@
             self.start_ssh()
         utils.renew_linux_ip_address(self._ssh_session, self.test_interface)
 
-    def get_addr(self, addr_type='ipv4_private', timeout_sec=None):
+    def get_addr(self, addr_type="ipv4_private", timeout_sec=None):
         """Wait until a type of IP address on the test interface is available
         then return it.
         """
         if not self._ssh_session:
             self.start_ssh()
-        return utils.get_addr(self._ssh_session, self.test_interface,
-                              addr_type, timeout_sec)
+        return utils.get_addr(
+            self._ssh_session, self.test_interface, addr_type, timeout_sec
+        )
 
     def _cleanup_iperf_port(self):
         """Checks and kills zombie iperf servers occupying intended port."""
-        iperf_check_cmd = ('netstat -tulpn | grep LISTEN | grep iperf3'
-                           ' | grep :{}').format(self.port)
-        iperf_check = self._ssh_session.run(iperf_check_cmd,
-                                            ignore_status=True)
+        iperf_check_cmd = (
+            "netstat -tulpn | grep LISTEN | grep iperf3" " | grep :{}"
+        ).format(self.port)
+        iperf_check = self._ssh_session.run(iperf_check_cmd, ignore_status=True)
         iperf_check = iperf_check.stdout
         if iperf_check:
-            logging.debug('Killing zombie server on port {}'.format(self.port))
-            iperf_pid = iperf_check.split(' ')[-1].split('/')[0]
-            self._ssh_session.run('kill -9 {}'.format(str(iperf_pid)))
+            logging.debug("Killing zombie server on port {}".format(self.port))
+            iperf_pid = iperf_check.split(" ")[-1].split("/")[0]
+            self._ssh_session.run("kill -9 {}".format(str(iperf_pid)))
 
-    def start(self, extra_args='', tag='', iperf_binary=None):
+    def start(self, extra_args="", tag="", iperf_binary=None):
         """Starts iperf server on specified machine and port.
 
         Args:
@@ -524,17 +529,19 @@
             self.start_ssh()
         self._cleanup_iperf_port()
         if not iperf_binary:
-            logging.debug('No iperf3 binary specified.  '
-                          'Assuming iperf3 is in the path.')
-            iperf_binary = 'iperf3'
+            logging.debug(
+                "No iperf3 binary specified.  " "Assuming iperf3 is in the path."
+            )
+            iperf_binary = "iperf3"
         else:
-            logging.debug('Using iperf3 binary located at %s' % iperf_binary)
-        iperf_command = '{} -s -J -p {}'.format(iperf_binary, self.port)
+            logging.debug("Using iperf3 binary located at %s" % iperf_binary)
+        iperf_command = "{} -s -J -p {}".format(iperf_binary, self.port)
 
-        cmd = '{cmd} {extra_flags} > {log_file}'.format(
+        cmd = "{cmd} {extra_flags} > {log_file}".format(
             cmd=iperf_command,
             extra_flags=extra_args,
-            log_file=self._get_remote_log_path())
+            log_file=self._get_remote_log_path(),
+        )
 
         job_result = self._ssh_session.run_async(cmd)
         self._iperf_pid = job_result.stdout
@@ -550,20 +557,19 @@
             return
 
         if self._use_killall:
-            self._ssh_session.run('killall iperf3', ignore_status=True)
+            self._ssh_session.run("killall iperf3", ignore_status=True)
         else:
-            self._ssh_session.run_async('kill -9 {}'.format(
-                str(self._iperf_pid)))
+            self._ssh_session.run_async("kill -9 {}".format(str(self._iperf_pid)))
 
-        iperf_result = self._ssh_session.run('cat {}'.format(
-            self._get_remote_log_path()))
+        iperf_result = self._ssh_session.run(
+            "cat {}".format(self._get_remote_log_path())
+        )
 
         log_file = self._get_full_file_path(self._current_tag)
-        with open(log_file, 'w') as f:
+        with open(log_file, "w") as f:
             f.write(iperf_result.stdout)
 
-        self._ssh_session.run_async('rm {}'.format(
-            self._get_remote_log_path()))
+        self._ssh_session.run_async("rm {}".format(self._get_remote_log_path()))
         self._iperf_pid = None
         return log_file
 
@@ -613,8 +619,7 @@
         }
 
 
-event_bus.register_subscription(
-    _AndroidDeviceBridge.on_test_begin.subscription)
+event_bus.register_subscription(_AndroidDeviceBridge.on_test_begin.subscription)
 event_bus.register_subscription(_AndroidDeviceBridge.on_test_end.subscription)
 
 
@@ -635,7 +640,7 @@
         self._android_device_or_serial = android_device_or_serial
 
         self._iperf_process = None
-        self._current_tag = ''
+        self._current_tag = ""
 
     @property
     def port(self):
@@ -651,12 +656,13 @@
             return self._android_device_or_serial
         else:
             return _AndroidDeviceBridge.android_devices()[
-                self._android_device_or_serial]
+                self._android_device_or_serial
+            ]
 
     def _get_device_log_path(self):
-        return '~/data/iperf_server_port%s.log' % self.port
+        return "~/data/iperf_server_port%s.log" % self.port
 
-    def start(self, extra_args='', tag='', iperf_binary=None):
+    def start(self, extra_args="", tag="", iperf_binary=None):
         """Starts iperf server on an ADB device.
 
         Args:
@@ -671,23 +677,27 @@
             return
 
         if not iperf_binary:
-            logging.debug('No iperf3 binary specified.  '
-                          'Assuming iperf3 is in the path.')
-            iperf_binary = 'iperf3'
+            logging.debug(
+                "No iperf3 binary specified.  " "Assuming iperf3 is in the path."
+            )
+            iperf_binary = "iperf3"
         else:
-            logging.debug('Using iperf3 binary located at %s' % iperf_binary)
-        iperf_command = '{} -s -J -p {}'.format(iperf_binary, self.port)
+            logging.debug("Using iperf3 binary located at %s" % iperf_binary)
+        iperf_command = "{} -s -J -p {}".format(iperf_binary, self.port)
 
         self._iperf_process = self._android_device.adb.shell_nb(
-            '{cmd} {extra_flags} > {log_file} 2>&1'.format(
+            "{cmd} {extra_flags} > {log_file} 2>&1".format(
                 cmd=iperf_command,
                 extra_flags=extra_args,
-                log_file=self._get_device_log_path()))
+                log_file=self._get_device_log_path(),
+            )
+        )
 
-        self._iperf_process_adb_pid = ''
+        self._iperf_process_adb_pid = ""
         while len(self._iperf_process_adb_pid) == 0:
             self._iperf_process_adb_pid = self._android_device.adb.shell(
-                'pgrep iperf3 -n')
+                "pgrep iperf3 -n"
+            )
 
         self._current_tag = tag
 
@@ -700,26 +710,27 @@
         if self._iperf_process is None:
             return
 
-        job.run('kill -9 {}'.format(self._iperf_process.pid))
+        job.run("kill -9 {}".format(self._iperf_process.pid))
 
         # TODO(markdr): update with definitive kill method
         while True:
-            iperf_process_list = self._android_device.adb.shell('pgrep iperf3')
+            iperf_process_list = self._android_device.adb.shell("pgrep iperf3")
             if iperf_process_list.find(self._iperf_process_adb_pid) == -1:
                 break
             else:
-                self._android_device.adb.shell("kill -9 {}".format(
-                    self._iperf_process_adb_pid))
+                self._android_device.adb.shell(
+                    "kill -9 {}".format(self._iperf_process_adb_pid)
+                )
 
-        iperf_result = self._android_device.adb.shell('cat {}'.format(
-            self._get_device_log_path()))
+        iperf_result = self._android_device.adb.shell(
+            "cat {}".format(self._get_device_log_path())
+        )
 
         log_file = self._get_full_file_path(self._current_tag)
-        with open(log_file, 'w') as f:
+        with open(log_file, "w") as f:
             f.write(iperf_result)
 
-        self._android_device.adb.shell('rm {}'.format(
-            self._get_device_log_path()))
+        self._android_device.adb.shell("rm {}".format(self._get_device_log_path()))
 
         self._iperf_process = None
         return log_file
diff --git a/src/antlion/controllers/monsoon.py b/src/antlion/controllers/monsoon.py
deleted file mode 100644
index a14f4fd..0000000
--- a/src/antlion/controllers/monsoon.py
+++ /dev/null
@@ -1,77 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-from antlion.controllers.monsoon_lib.api.hvpm.monsoon import Monsoon as HvpmMonsoon
-from antlion.controllers.monsoon_lib.api.lvpm_stock.monsoon import \
-    Monsoon as LvpmStockMonsoon
-
-MOBLY_CONTROLLER_CONFIG_NAME = 'Monsoon'
-ACTS_CONTROLLER_REFERENCE_NAME = 'monsoons'
-
-
-def create(configs):
-    """Takes a list of Monsoon configs and returns Monsoon Controllers.
-
-    Args:
-        configs: A list of serial numbers, or dicts in the form:
-            {
-                'type': anyof('LvpmStockMonsoon', 'HvpmMonsoon')
-                'serial': int
-            }
-
-    Returns:
-        a list of Monsoon configs
-
-    Raises:
-        ValueError if the configuration does not provide the required info.
-    """
-    objs = []
-    for config in configs:
-        monsoon_type = None
-        if isinstance(config, dict):
-            if isinstance(config.get('type', None), str):
-                if 'lvpm' in config['type'].lower():
-                    monsoon_type = LvpmStockMonsoon
-                elif 'hvpm' in config['type'].lower():
-                    monsoon_type = HvpmMonsoon
-                else:
-                    raise ValueError('Unknown monsoon type %s in Monsoon '
-                                     'config %s' % (config['type'], config))
-            if 'serial' not in config:
-                raise ValueError('Monsoon config must specify "serial".')
-            serial_number = int(config.get('serial'))
-        else:
-            serial_number = int(config)
-        if monsoon_type is None:
-            if serial_number < 20000:
-                # This code assumes the LVPM has firmware version 20. If
-                # someone has updated the firmware, or somehow found an older
-                # version, the power measurement will fail.
-                monsoon_type = LvpmStockMonsoon
-            else:
-                monsoon_type = HvpmMonsoon
-
-        objs.append(monsoon_type(serial=serial_number))
-    return objs
-
-
-def destroy(monsoons):
-    for monsoon in monsoons:
-        if monsoon.is_allocated():
-            monsoon.release_monsoon_connection()
-
-
-def get_info(monsoons):
-    return [{'serial': monsoon.serial} for monsoon in monsoons]
diff --git a/src/antlion/controllers/monsoon_lib/__init__.py b/src/antlion/controllers/monsoon_lib/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/controllers/monsoon_lib/__init__.py
+++ /dev/null
diff --git a/src/antlion/controllers/monsoon_lib/api/__init__.py b/src/antlion/controllers/monsoon_lib/api/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/controllers/monsoon_lib/api/__init__.py
+++ /dev/null
diff --git a/src/antlion/controllers/monsoon_lib/api/common.py b/src/antlion/controllers/monsoon_lib/api/common.py
deleted file mode 100644
index 70e0a0a..0000000
--- a/src/antlion/controllers/monsoon_lib/api/common.py
+++ /dev/null
@@ -1,157 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.signals import ControllerError
-
-
-class MonsoonError(ControllerError):
-    """Raised for exceptions encountered when interfacing with a Monsoon device.
-    """
-
-
-class PassthroughStates(object):
-    """An enum containing the values for power monitor's passthrough states."""
-    # "Off" or 0 means USB always off.
-    OFF = 0
-    # "On" or 1 means USB always on.
-    ON = 1
-    # "Auto" or 2 means USB is automatically turned off during sampling, and
-    # turned back on after sampling.
-    AUTO = 2
-
-
-PASSTHROUGH_STATES = {
-    'off': PassthroughStates.OFF,
-    'on': PassthroughStates.ON,
-    'auto': PassthroughStates.AUTO
-}
-
-
-class MonsoonDataRecord(object):
-    """A data class for Monsoon data points."""
-    def __init__(self, sample_time, relative_time, current):
-        """Creates a new MonsoonDataRecord.
-
-        Args:
-            sample_time: the unix timestamp of the sample.
-            relative_time: the time since the start of the measurement.
-            current: The current in Amperes as a string.
-        """
-        self._sample_time = sample_time
-        self._relative_time = relative_time
-        self._current = current
-
-    @property
-    def time(self):
-        """The time the record was fetched."""
-        return self._sample_time
-
-    @property
-    def relative_time(self):
-        """The time the record was fetched, relative to collection start."""
-        return self._relative_time
-
-    @property
-    def current(self):
-        """The amount of current in Amperes measured for the given record."""
-        return self._current
-
-
-class MonsoonResult(object):
-    """An object that contains aggregated data collected during sampling.
-
-    Attributes:
-        _num_samples: The number of samples gathered.
-        _sum_currents: The total sum of all current values gathered, in amperes.
-        _hz: The frequency sampling is being done at.
-        _voltage: The voltage output during sampling.
-    """
-
-    # The number of decimal places to round a value to.
-    ROUND_TO = 6
-
-    def __init__(self, num_samples, sum_currents, hz, voltage, datafile_path):
-        """Creates a new MonsoonResult.
-
-        Args:
-            num_samples: the number of samples collected.
-            sum_currents: the total summation of every current measurement.
-            hz: the number of samples per second.
-            voltage: the voltage used during the test.
-            datafile_path: the path to the monsoon data file.
-        """
-        self._num_samples = num_samples
-        self._sum_currents = sum_currents
-        self._hz = hz
-        self._voltage = voltage
-        self.tag = datafile_path
-
-    def get_data_points(self):
-        """Returns an iterator of MonsoonDataRecords."""
-        class MonsoonDataIterator:
-            def __init__(self, file):
-                self.file = file
-
-            def __iter__(self):
-                with open(self.file, 'r') as f:
-                    start_time = None
-                    for line in f:
-                        # Remove the newline character.
-                        line.strip()
-                        sample_time, current = map(float, line.split(' '))
-                        if start_time is None:
-                            start_time = sample_time
-                        yield MonsoonDataRecord(sample_time,
-                                                sample_time - start_time,
-                                                current)
-
-        return MonsoonDataIterator(self.tag)
-
-    @property
-    def num_samples(self):
-        """The number of samples recorded during the test."""
-        return self._num_samples
-
-    @property
-    def average_current(self):
-        """Average current in mA."""
-        if self.num_samples == 0:
-            return 0
-        return round(self._sum_currents * 1000 / self.num_samples,
-                     self.ROUND_TO)
-
-    @property
-    def total_charge(self):
-        """Total charged used in the unit of mAh."""
-        return round((self._sum_currents / self._hz) * 1000 / 3600,
-                     self.ROUND_TO)
-
-    @property
-    def total_power(self):
-        """Total power used."""
-        return round(self.average_current * self._voltage, self.ROUND_TO)
-
-    @property
-    def voltage(self):
-        """The voltage during the measurement (in Volts)."""
-        return self._voltage
-
-    def __str__(self):
-        return ('avg current: %s\n'
-                'total charge: %s\n'
-                'total power: %s\n'
-                'total samples: %s' % (self.average_current, self.total_charge,
-                                      self.total_power, self._num_samples))
diff --git a/src/antlion/controllers/monsoon_lib/api/hvpm/__init__.py b/src/antlion/controllers/monsoon_lib/api/hvpm/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/controllers/monsoon_lib/api/hvpm/__init__.py
+++ /dev/null
diff --git a/src/antlion/controllers/monsoon_lib/api/hvpm/monsoon.py b/src/antlion/controllers/monsoon_lib/api/hvpm/monsoon.py
deleted file mode 100644
index 44afba2..0000000
--- a/src/antlion/controllers/monsoon_lib/api/hvpm/monsoon.py
+++ /dev/null
@@ -1,166 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import multiprocessing
-import time
-
-from Monsoon import HVPM
-from Monsoon import Operations as op
-
-from antlion.controllers.monsoon_lib.api.common import MonsoonResult
-from antlion.controllers.monsoon_lib.api.monsoon import BaseMonsoon
-from antlion.controllers.monsoon_lib.sampling.engine.assembly_line import AssemblyLineBuilder
-from antlion.controllers.monsoon_lib.sampling.engine.assembly_line import ThreadAssemblyLine
-from antlion.controllers.monsoon_lib.sampling.engine.transformers import DownSampler
-from antlion.controllers.monsoon_lib.sampling.engine.transformers import SampleAggregator
-from antlion.controllers.monsoon_lib.sampling.engine.transformers import Tee
-from antlion.controllers.monsoon_lib.sampling.hvpm.transformers import HvpmTransformer
-
-
-class Monsoon(BaseMonsoon):
-    """The controller class for interacting with the HVPM Monsoon."""
-
-    # The device doesn't officially support voltages lower than this. Note that
-    # 0 is a valid voltage.
-    MIN_VOLTAGE = 0.8
-
-    # The Monsoon doesn't support setting higher voltages than this directly
-    # without tripping overvoltage.
-    # Note that it is possible to increase the voltage above this value by
-    # increasing the voltage by small increments over a period of time.
-    # The communication protocol supports up to 16V.
-    MAX_VOLTAGE = 13.5
-
-    def __init__(self, serial):
-        super().__init__()
-        self.serial = serial
-        self._mon = HVPM.Monsoon()
-        self._mon.setup_usb(serial)
-        self._allocated = True
-        if self._mon.Protocol.DEVICE is None:
-            raise ValueError('HVPM Monsoon %s could not be found.' % serial)
-
-    def set_voltage(self, voltage):
-        """Sets the output voltage of monsoon.
-
-        Args:
-            voltage: The voltage to set the output to.
-        """
-        self._log.debug('Setting voltage to %sV.' % voltage)
-        self._mon.setVout(voltage)
-
-    def set_max_current(self, amperes):
-        """Sets monsoon's max output current.
-
-        Args:
-            amperes: The max current in A.
-        """
-        self._mon.setRunTimeCurrentLimit(amperes)
-
-    def set_max_initial_current(self, amperes):
-        """Sets the max power-up/initial current.
-
-        Args:
-            amperes: The max initial current allowed in amperes.
-        """
-        self._mon.setPowerUpCurrentLimit(amperes)
-
-    @property
-    def status(self):
-        """Gets the status params of monsoon.
-
-        Returns:
-            A dictionary of {status param, value} key-value pairs.
-        """
-        self._mon.fillStatusPacket()
-        return self._mon.statusPacket
-
-    def _set_usb_passthrough_mode(self, mode):
-        """Sends the call to set usb passthrough mode.
-
-        Args:
-            mode: The state to set the USB passthrough to. Can either be the
-                string name of the state or the integer value.
-
-                "Off" or 0 means USB always off.
-                "On" or 1 means USB always on.
-                "Auto" or 2 means USB is automatically turned off during
-                    sampling, and turned back on after sampling.
-        """
-        self._mon.setUSBPassthroughMode(mode)
-
-    def _get_main_voltage(self):
-        """Returns the value of the voltage on the main channel."""
-        # Any getValue call on a setX function will return the value set for X.
-        # Using this, we can pull the last setMainVoltage (or its default).
-        return (self._mon.Protocol.getValue(op.OpCodes.setMainVoltage, 4) /
-                op.Conversion.FLOAT_TO_INT)
-
-    def measure_power(self,
-                      duration,
-                      measure_after_seconds=0,
-                      hz=5000,
-                      output_path=None,
-                      transformers=None):
-        """See parent docstring for details."""
-        voltage = self._get_main_voltage()
-
-        aggregator = SampleAggregator(measure_after_seconds)
-        manager = multiprocessing.Manager()
-
-        assembly_line_builder = AssemblyLineBuilder(manager.Queue,
-                                                    ThreadAssemblyLine)
-        assembly_line_builder.source(
-            HvpmTransformer(self.serial, duration + measure_after_seconds))
-        if hz != 5000:
-            assembly_line_builder.into(DownSampler(int(5000 / hz)))
-        if output_path:
-            assembly_line_builder.into(Tee(output_path, measure_after_seconds))
-        assembly_line_builder.into(aggregator)
-        if transformers:
-            for transformer in transformers:
-                assembly_line_builder.into(transformer)
-
-        self.take_samples(assembly_line_builder.build())
-
-        manager.shutdown()
-
-        self._mon.setup_usb(self.serial)
-        self._allocated = True
-        monsoon_data = MonsoonResult(aggregator.num_samples,
-                                     aggregator.sum_currents, hz, voltage,
-                                     output_path)
-        self._log.info('Measurement summary:\n%s', str(monsoon_data))
-        return monsoon_data
-
-    def reconnect_monsoon(self):
-        """Reconnect Monsoon to serial port."""
-        self.release_monsoon_connection()
-        self._log.info('Closed monsoon connection.')
-        time.sleep(5)
-        self.establish_monsoon_connection()
-
-    def release_monsoon_connection(self):
-        self._mon.closeDevice()
-        self._allocated = False
-
-    def is_allocated(self):
-        return self._allocated
-
-    def establish_monsoon_connection(self):
-        self._mon.setup_usb(self.serial)
-        # Makes sure the Monsoon is in the command-receiving state.
-        self._mon.stopSampling()
diff --git a/src/antlion/controllers/monsoon_lib/api/lvpm_stock/__init__.py b/src/antlion/controllers/monsoon_lib/api/lvpm_stock/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/controllers/monsoon_lib/api/lvpm_stock/__init__.py
+++ /dev/null
diff --git a/src/antlion/controllers/monsoon_lib/api/lvpm_stock/monsoon.py b/src/antlion/controllers/monsoon_lib/api/lvpm_stock/monsoon.py
deleted file mode 100644
index 063972f..0000000
--- a/src/antlion/controllers/monsoon_lib/api/lvpm_stock/monsoon.py
+++ /dev/null
@@ -1,151 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import multiprocessing
-import time
-
-from antlion.controllers.monsoon_lib.api.common import MonsoonResult
-from antlion.controllers.monsoon_lib.api.lvpm_stock.monsoon_proxy import MonsoonProxy
-from antlion.controllers.monsoon_lib.api.monsoon import BaseMonsoon
-from antlion.controllers.monsoon_lib.sampling.engine.assembly_line import AssemblyLineBuilder
-from antlion.controllers.monsoon_lib.sampling.engine.assembly_line import ThreadAssemblyLine
-from antlion.controllers.monsoon_lib.sampling.engine.transformers import DownSampler
-from antlion.controllers.monsoon_lib.sampling.engine.transformers import SampleAggregator
-from antlion.controllers.monsoon_lib.sampling.engine.transformers import Tee
-from antlion.controllers.monsoon_lib.sampling.lvpm_stock.stock_transformers import StockLvpmSampler
-
-
-class Monsoon(BaseMonsoon):
-    """The controller class for interacting with the LVPM Monsoon."""
-
-    # The device protocol has a floor value for positive voltages. Note that 0
-    # is still a valid voltage.
-    MIN_VOLTAGE = 2.01
-
-    # The device protocol does not support values above this.
-    MAX_VOLTAGE = 4.55
-
-    def __init__(self, serial, device=None):
-        super().__init__()
-        self._mon = MonsoonProxy(serialno=serial, device=device)
-        self._allocated = True
-        self.serial = serial
-
-    def set_voltage(self, voltage):
-        """Sets the output voltage of monsoon.
-
-        Args:
-            voltage: Voltage to set the output to.
-        """
-        self._log.debug('Setting voltage to %sV.' % voltage)
-        self._mon.set_voltage(voltage)
-
-    def set_max_current(self, amperes):
-        """Sets monsoon's max output current.
-
-        Args:
-            amperes: The max current in A.
-        """
-        self._mon.set_max_current(amperes)
-
-    def set_max_initial_current(self, amperes):
-        """Sets the max power-up/initial current.
-
-        Args:
-            amperes: The max initial current allowed in amperes.
-        """
-        self._mon.set_max_initial_current(amperes)
-
-    @property
-    def status(self):
-        """Gets the status params of monsoon.
-
-        Returns:
-            A dictionary of {status param, value} key-value pairs.
-        """
-        return self._mon.get_status()
-
-    def _set_usb_passthrough_mode(self, mode):
-        """Sends the call to set usb passthrough mode.
-
-        Args:
-            mode: The state to set the USB passthrough to. Can either be the
-                string name of the state or the integer value.
-
-                "Off" or 0 means USB always off.
-                "On" or 1 means USB always on.
-                "Auto" or 2 means USB is automatically turned off during
-                    sampling, and turned back on after sampling.
-        """
-        self._mon.set_usb_passthrough(mode)
-
-    def measure_power(self,
-                      duration,
-                      measure_after_seconds=0,
-                      hz=5000,
-                      output_path=None,
-                      transformers=None):
-        """See parent docstring for details."""
-        voltage = self._mon.get_voltage()
-
-        aggregator = SampleAggregator(measure_after_seconds)
-        manager = multiprocessing.Manager()
-
-        assembly_line_builder = AssemblyLineBuilder(manager.Queue,
-                                                    ThreadAssemblyLine)
-        assembly_line_builder.source(
-            StockLvpmSampler(self.serial, duration + measure_after_seconds))
-        if hz != 5000:
-            assembly_line_builder.into(DownSampler(int(round(5000 / hz))))
-        if output_path is not None:
-            assembly_line_builder.into(Tee(output_path, measure_after_seconds))
-        assembly_line_builder.into(aggregator)
-        if transformers:
-            for transformer in transformers:
-                assembly_line_builder.into(transformer)
-
-        self.take_samples(assembly_line_builder.build())
-
-        manager.shutdown()
-
-        monsoon_data = MonsoonResult(aggregator.num_samples,
-                                     aggregator.sum_currents, hz, voltage,
-                                     output_path)
-        self._log.info('Measurement summary:\n%s', str(monsoon_data))
-        return monsoon_data
-
-    def reconnect_monsoon(self):
-        """Reconnect Monsoon to serial port."""
-        self._log.debug('Close serial connection')
-        self._mon.ser.close()
-        self._log.debug('Reset serial port')
-        time.sleep(5)
-        self._log.debug('Open serial connection')
-        self._mon.ser.open()
-        self._mon.ser.reset_input_buffer()
-        self._mon.ser.reset_output_buffer()
-
-    def release_monsoon_connection(self):
-        self._mon.release_dev_port()
-        self._allocated = False
-
-    def is_allocated(self):
-        return self._allocated
-
-    def establish_monsoon_connection(self):
-        self._mon.obtain_dev_port()
-        self._allocated = True
-        # Makes sure the Monsoon is in the command-receiving state.
-        self._mon.stop_data_collection()
diff --git a/src/antlion/controllers/monsoon_lib/api/lvpm_stock/monsoon_proxy.py b/src/antlion/controllers/monsoon_lib/api/lvpm_stock/monsoon_proxy.py
deleted file mode 100644
index 909bff4..0000000
--- a/src/antlion/controllers/monsoon_lib/api/lvpm_stock/monsoon_proxy.py
+++ /dev/null
@@ -1,402 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""The interface for a USB-connected Monsoon power meter.
-
-Details on the protocol can be found at
-(http://msoon.com/LabEquipment/PowerMonitor/)
-
-Based on the original py2 script of kens@google.com.
-"""
-import collections
-import fcntl
-import logging
-import os
-import select
-import struct
-import sys
-import time
-
-import errno
-import serial
-
-from antlion.controllers.monsoon_lib.api.common import MonsoonError
-
-
-class LvpmStatusPacket(object):
-    """The data received from asking an LVPM Monsoon for its status.
-
-    Attributes names with the same values as HVPM match those defined in
-    Monsoon.Operations.statusPacket.
-    """
-
-    def __init__(self, values):
-        iter_value = iter(values)
-        self.packetType = next(iter_value)
-        self.firmwareVersion = next(iter_value)
-        self.protocolVersion = next(iter_value)
-        self.mainFineCurrent = next(iter_value)
-        self.usbFineCurrent = next(iter_value)
-        self.auxFineCurrent = next(iter_value)
-        self.voltage1 = next(iter_value)
-        self.mainCoarseCurrent = next(iter_value)
-        self.usbCoarseCurrent = next(iter_value)
-        self.auxCoarseCurrent = next(iter_value)
-        self.voltage2 = next(iter_value)
-        self.outputVoltageSetting = next(iter_value)
-        self.temperature = next(iter_value)
-        self.status = next(iter_value)
-        self.leds = next(iter_value)
-        self.mainFineResistor = next(iter_value)
-        self.serialNumber = next(iter_value)
-        self.sampleRate = next(iter_value)
-        self.dacCalLow = next(iter_value)
-        self.dacCalHigh = next(iter_value)
-        self.powerupCurrentLimit = next(iter_value)
-        self.runtimeCurrentLimit = next(iter_value)
-        self.powerupTime = next(iter_value)
-        self.usbFineResistor = next(iter_value)
-        self.auxFineResistor = next(iter_value)
-        self.initialUsbVoltage = next(iter_value)
-        self.initialAuxVoltage = next(iter_value)
-        self.hardwareRevision = next(iter_value)
-        self.temperatureLimit = next(iter_value)
-        self.usbPassthroughMode = next(iter_value)
-        self.mainCoarseResistor = next(iter_value)
-        self.usbCoarseResistor = next(iter_value)
-        self.auxCoarseResistor = next(iter_value)
-        self.defMainFineResistor = next(iter_value)
-        self.defUsbFineResistor = next(iter_value)
-        self.defAuxFineResistor = next(iter_value)
-        self.defMainCoarseResistor = next(iter_value)
-        self.defUsbCoarseResistor = next(iter_value)
-        self.defAuxCoarseResistor = next(iter_value)
-        self.eventCode = next(iter_value)
-        self.eventData = next(iter_value)
-
-
-class MonsoonProxy(object):
-    """Class that directly talks to monsoon over serial.
-
-    Provides a simple class to use the power meter.
-    See http://wiki/Main/MonsoonProtocol for information on the protocol.
-    """
-
-    # The format of the status packet.
-    STATUS_FORMAT = '>BBBhhhHhhhHBBBxBbHBHHHHBbbHHBBBbbbbbbbbbBH'
-
-    # The list of fields that appear in the Monsoon status packet.
-    STATUS_FIELDS = [
-        'packetType',
-        'firmwareVersion',
-        'protocolVersion',
-        'mainFineCurrent',
-        'usbFineCurrent',
-        'auxFineCurrent',
-        'voltage1',
-        'mainCoarseCurrent',
-        'usbCoarseCurrent',
-        'auxCoarseCurrent',
-        'voltage2',
-        'outputVoltageSetting',
-        'temperature',
-        'status',
-        'leds',
-        'mainFineResistorOffset',
-        'serialNumber',
-        'sampleRate',
-        'dacCalLow',
-        'dacCalHigh',
-        'powerupCurrentLimit',
-        'runtimeCurrentLimit',
-        'powerupTime',
-        'usbFineResistorOffset',
-        'auxFineResistorOffset',
-        'initialUsbVoltage',
-        'initialAuxVoltage',
-        'hardwareRevision',
-        'temperatureLimit',
-        'usbPassthroughMode',
-        'mainCoarseResistorOffset',
-        'usbCoarseResistorOffset',
-        'auxCoarseResistorOffset',
-        'defMainFineResistor',
-        'defUsbFineResistor',
-        'defAuxFineResistor',
-        'defMainCoarseResistor',
-        'defUsbCoarseResistor',
-        'defAuxCoarseResistor',
-        'eventCode',
-        'eventData',
-    ]
-
-    def __init__(self, device=None, serialno=None, connection_timeout=600):
-        """Establish a connection to a Monsoon.
-
-        By default, opens the first available port, waiting if none are ready.
-
-        Args:
-            device: The particular device port to be used.
-            serialno: The Monsoon's serial number.
-            connection_timeout: The number of seconds to wait for the device to
-                connect.
-
-        Raises:
-            TimeoutError if unable to connect to the device.
-        """
-        self.start_voltage = 0
-        self.serial = serialno
-
-        if device:
-            self.ser = serial.Serial(device, timeout=1)
-            return
-        # Try all devices connected through USB virtual serial ports until we
-        # find one we can use.
-        self._tempfile = None
-        self.obtain_dev_port(connection_timeout)
-        self.log = logging.getLogger()
-
-    def obtain_dev_port(self, timeout=600):
-        """Obtains the device port for this Monsoon.
-
-        Args:
-            timeout: The time in seconds to wait for the device to connect.
-
-        Raises:
-            TimeoutError if the device was unable to be found, or was not
-            available.
-        """
-        start_time = time.time()
-
-        while start_time + timeout > time.time():
-            for dev in os.listdir('/dev'):
-                prefix = 'ttyACM'
-                # Prefix is different on Mac OS X.
-                if sys.platform == 'darwin':
-                    prefix = 'tty.usbmodem'
-                if not dev.startswith(prefix):
-                    continue
-                tmpname = '/tmp/monsoon.%s.%s' % (os.uname()[0], dev)
-                self._tempfile = open(tmpname, 'w')
-                if not os.access(tmpname, os.R_OK | os.W_OK):
-                    try:
-                        os.chmod(tmpname, 0o666)
-                    except OSError as e:
-                        if e.errno == errno.EACCES:
-                            raise ValueError(
-                                'Unable to set permissions to read/write to '
-                                '%s. This file is owned by another user; '
-                                'please grant o+wr access to this file, or '
-                                'run as that user.')
-                        raise
-
-                try:  # Use a lock file to ensure exclusive access.
-                    fcntl.flock(self._tempfile, fcntl.LOCK_EX | fcntl.LOCK_NB)
-                except IOError:
-                    logging.error('Device %s is in use.', repr(dev))
-                    continue
-
-                try:  # try to open the device
-                    self.ser = serial.Serial('/dev/%s' % dev, timeout=1)
-                    self.stop_data_collection()  # just in case
-                    self._flush_input()  # discard stale input
-                    status = self.get_status()
-                except Exception as e:
-                    logging.warning('Error opening device %s: %s', dev, e,
-                                    exc_info=True)
-                    continue
-
-                if not status:
-                    logging.error('No response from device %s.', dev)
-                elif self.serial and status.serialNumber != self.serial:
-                    logging.error('Another device serial #%d seen on %s',
-                                  status.serialNumber, dev)
-                else:
-                    self.start_voltage = status.voltage1
-                    return
-
-            self._tempfile = None
-            logging.info('Waiting for device...')
-            time.sleep(1)
-        raise TimeoutError(
-            'Unable to connect to Monsoon device with '
-            'serial "%s" within %s seconds.' % (self.serial, timeout))
-
-    def release_dev_port(self):
-        """Releases the dev port used to communicate with the Monsoon device."""
-        fcntl.flock(self._tempfile, fcntl.LOCK_UN)
-        self._tempfile.close()
-        self.ser.close()
-
-    def get_status(self):
-        """Requests and waits for status.
-
-        Returns:
-            status dictionary.
-        """
-        self._send_struct('BBB', 0x01, 0x00, 0x00)
-        read_bytes = self._read_packet()
-
-        if not read_bytes:
-            raise MonsoonError('Failed to read Monsoon status')
-        expected_size = struct.calcsize(self.STATUS_FORMAT)
-        if len(read_bytes) != expected_size or read_bytes[0] != 0x10:
-            raise MonsoonError('Wanted status, dropped type=0x%02x, len=%d',
-                               read_bytes[0], len(read_bytes))
-
-        status = collections.OrderedDict(
-            zip(self.STATUS_FIELDS,
-                struct.unpack(self.STATUS_FORMAT, read_bytes)))
-        p_type = status['packetType']
-        if p_type != 0x10:
-            raise MonsoonError('Packet type %s is not 0x10.' % p_type)
-
-        for k in status.keys():
-            if k.endswith('VoltageSetting'):
-                status[k] = 2.0 + status[k] * 0.01
-            elif k.endswith('FineCurrent'):
-                pass  # needs calibration data
-            elif k.endswith('CoarseCurrent'):
-                pass  # needs calibration data
-            elif k.startswith('voltage') or k.endswith('Voltage'):
-                status[k] = status[k] * 0.000125
-            elif k.endswith('Resistor'):
-                status[k] = 0.05 + status[k] * 0.0001
-                if k.startswith('aux') or k.startswith('defAux'):
-                    status[k] += 0.05
-            elif k.endswith('CurrentLimit'):
-                status[k] = 8 * (1023 - status[k]) / 1023.0
-        return LvpmStatusPacket(status.values())
-
-    def set_voltage(self, voltage):
-        """Sets the voltage on the device to the specified value.
-
-        Args:
-            voltage: Either 0 or a value between 2.01 and 4.55 inclusive.
-
-        Raises:
-            struct.error if voltage is an invalid value.
-        """
-        # The device has a range of 255 voltage values:
-        #
-        #     0   is "off". Note this value not set outputVoltageSetting to
-        #             zero. The previous outputVoltageSetting value is
-        #             maintained.
-        #     1   is 2.01V.
-        #     255 is 4.55V.
-        voltage_byte = max(0, round((voltage - 2.0) * 100))
-        self._send_struct('BBB', 0x01, 0x01, voltage_byte)
-
-    def get_voltage(self):
-        """Get the output voltage.
-
-        Returns:
-            Current Output Voltage (in unit of V).
-        """
-        return self.get_status().outputVoltageSetting
-
-    def set_max_current(self, i):
-        """Set the max output current."""
-        if i < 0 or i > 8:
-            raise MonsoonError(('Target max current %sA, is out of acceptable '
-                                'range [0, 8].') % i)
-        val = 1023 - int((i / 8) * 1023)
-        self._send_struct('BBB', 0x01, 0x0a, val & 0xff)
-        self._send_struct('BBB', 0x01, 0x0b, val >> 8)
-
-    def set_max_initial_current(self, current):
-        """Sets the maximum initial current, in mA."""
-        if current < 0 or current > 8:
-            raise MonsoonError(('Target max current %sA, is out of acceptable '
-                                'range [0, 8].') % current)
-        val = 1023 - int((current / 8) * 1023)
-        self._send_struct('BBB', 0x01, 0x08, val & 0xff)
-        self._send_struct('BBB', 0x01, 0x09, val >> 8)
-
-    def set_usb_passthrough(self, passthrough_mode):
-        """Set the USB passthrough mode.
-
-        Args:
-            passthrough_mode: The mode used for passthrough. Must be the integer
-                value. See common.PassthroughModes for a list of values and
-                their meanings.
-        """
-        self._send_struct('BBB', 0x01, 0x10, passthrough_mode)
-
-    def get_usb_passthrough(self):
-        """Get the USB passthrough mode: 0 = off, 1 = on,  2 = auto.
-
-        Returns:
-            The mode used for passthrough, as an integer. See
-                common.PassthroughModes for a list of values and their meanings.
-        """
-        return self.get_status().usbPassthroughMode
-
-    def start_data_collection(self):
-        """Tell the device to start collecting and sending measurement data."""
-        self._send_struct('BBB', 0x01, 0x1b, 0x01)  # Mystery command
-        self._send_struct('BBBBBBB', 0x02, 0xff, 0xff, 0xff, 0xff, 0x03, 0xe8)
-
-    def stop_data_collection(self):
-        """Tell the device to stop collecting measurement data."""
-        self._send_struct('BB', 0x03, 0x00)  # stop
-
-    def _send_struct(self, fmt, *args):
-        """Pack a struct (without length or checksum) and send it."""
-        # Flush out the input buffer before sending data
-        self._flush_input()
-        data = struct.pack(fmt, *args)
-        data_len = len(data) + 1
-        checksum = (data_len + sum(bytearray(data))) % 256
-        out = struct.pack('B', data_len) + data + struct.pack('B', checksum)
-        self.ser.write(out)
-
-    def _read_packet(self):
-        """Returns a single packet as a string (without length or checksum)."""
-        len_char = self.ser.read(1)
-        if not len_char:
-            raise MonsoonError('Reading from serial port timed out')
-
-        data_len = ord(len_char)
-        if not data_len:
-            return ''
-        result = self.ser.read(int(data_len))
-        result = bytearray(result)
-        if len(result) != data_len:
-            raise MonsoonError(
-                'Length mismatch, expected %d bytes, got %d bytes.', data_len,
-                len(result))
-        body = result[:-1]
-        checksum = (sum(struct.unpack('B' * len(body), body)) + data_len) % 256
-        if result[-1] != checksum:
-            raise MonsoonError(
-                'Invalid checksum from serial port! Expected %s, got %s',
-                hex(checksum), hex(result[-1]))
-        return result[:-1]
-
-    def _flush_input(self):
-        """Flushes all read data until the input is empty."""
-        self.ser.reset_input_buffer()
-        while True:
-            ready_r, ready_w, ready_x = select.select([self.ser], [],
-                                                      [self.ser], 0)
-            if len(ready_x) > 0:
-                raise MonsoonError('Exception from serial port.')
-            elif len(ready_r) > 0:
-                self.ser.read(1)  # This may cause underlying buffering.
-                # Flush the underlying buffer too.
-                self.ser.reset_input_buffer()
-            else:
-                break
diff --git a/src/antlion/controllers/monsoon_lib/api/monsoon.py b/src/antlion/controllers/monsoon_lib/api/monsoon.py
deleted file mode 100644
index 68ab81c..0000000
--- a/src/antlion/controllers/monsoon_lib/api/monsoon.py
+++ /dev/null
@@ -1,300 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import logging
-import time
-
-from antlion.controllers.monsoon_lib.api import common
-from antlion.controllers.monsoon_lib.api.common import MonsoonError
-from antlion.controllers.monsoon_lib.api.common import PassthroughStates
-
-
-class BaseMonsoon(object):
-    """The base class for all Monsoon interface devices.
-
-    Attributes:
-        on_reconnect: The function to call when Monsoon has reconnected USB.
-            Raises TimeoutError if the device cannot be found.
-        on_disconnect: The function to call when Monsoon has disconnected USB.
-    """
-
-    # The minimum non-zero supported voltage for the given Monsoon device.
-    MIN_VOLTAGE = NotImplemented
-
-    # The maximum practical voltage for the given Monsoon device.
-    MAX_VOLTAGE = NotImplemented
-
-    # When ramping voltage, the rate in volts/second to increase the voltage.
-    VOLTAGE_RAMP_RATE = 3
-
-    # The time step between voltage increments. This value does not need to be
-    # modified.
-    VOLTAGE_RAMP_TIME_STEP = .1
-
-    def __init__(self):
-        self._log = logging.getLogger()
-        self.on_disconnect = lambda: None
-        self.on_reconnect = lambda: None
-
-    @classmethod
-    def get_closest_valid_voltage(cls, voltage):
-        """Returns the nearest valid voltage value."""
-        if voltage < cls.MIN_VOLTAGE / 2:
-            return 0
-        else:
-            return max(cls.MIN_VOLTAGE, min(voltage, cls.MAX_VOLTAGE))
-
-    @classmethod
-    def is_voltage_valid(cls, voltage):
-        """Returns True iff the given voltage can be set on the device.
-
-        Valid voltage values are {x | x ∈ {0} ∪ [MIN_VOLTAGE, MAX_VOLTAGE]}.
-        """
-        return cls.get_closest_valid_voltage(voltage) == voltage
-
-    @classmethod
-    def validate_voltage(cls, voltage):
-        """Raises a MonsoonError if the given voltage cannot be set."""
-        if not cls.is_voltage_valid(voltage):
-            raise MonsoonError('Invalid voltage %s. Voltage must be zero or '
-                               'within range [%s, %s].' %
-                               (voltage, cls.MIN_VOLTAGE, cls.MAX_VOLTAGE))
-
-    def set_voltage_safe(self, voltage):
-        """Sets the output voltage of monsoon to a safe value.
-
-        This function is effectively:
-            self.set_voltage(self.get_closest_valid_voltage(voltage)).
-
-        Args:
-            voltage: The voltage to set the output to.
-        """
-        normalized_voltage = self.get_closest_valid_voltage(voltage)
-        if voltage != normalized_voltage:
-            self._log.debug(
-                'Requested voltage %sV is invalid.' % voltage)
-        self.set_voltage(normalized_voltage)
-
-    def ramp_voltage(self, start, end):
-        """Ramps up the voltage to the specified end voltage.
-
-        Increments the voltage by fixed intervals of .1 Volts every .1 seconds.
-
-        Args:
-            start: The starting voltage
-            end: the end voltage. Must be higher than the starting voltage.
-        """
-        voltage = start
-
-        while voltage < end:
-            self.set_voltage(self.get_closest_valid_voltage(voltage))
-            voltage += self.VOLTAGE_RAMP_RATE * self.VOLTAGE_RAMP_TIME_STEP
-            time.sleep(self.VOLTAGE_RAMP_TIME_STEP)
-        self.set_voltage(end)
-
-    def usb(self, state):
-        """Sets the monsoon's USB passthrough mode.
-
-        This is specific to the USB port in front of the monsoon box which
-        connects to the powered device, NOT the USB that is used to talk to the
-        monsoon itself.
-
-        Args:
-            state: The state to set the USB passthrough to. Can either be the
-                string name of the state or the integer value.
-
-                "Off" or 0 means USB always off.
-                "On" or 1 means USB always on.
-                "Auto" or 2 means USB is automatically turned off during
-                    sampling, and turned back on after sampling.
-
-        Raises:
-            ValueError if the state given is invalid.
-            TimeoutError if unable to set the passthrough mode within a minute,
-                or if the device was not found after setting the state to ON.
-        """
-        expected_state = None
-        states_dict = common.PASSTHROUGH_STATES
-        if isinstance(state, str):
-            normalized_state = state.lower()
-            expected_state = states_dict.get(normalized_state, None)
-        elif state in states_dict.values():
-            expected_state = state
-
-        if expected_state is None:
-            raise ValueError(
-                'USB passthrough state %s is not a valid state. '
-                'Expected any of %s.' % (repr(state), states_dict))
-        if self.status.usbPassthroughMode == expected_state:
-            return
-
-        if expected_state in [PassthroughStates.OFF, PassthroughStates.AUTO]:
-            self.on_disconnect()
-
-        start_time = time.time()
-        time_limit_seconds = 60
-        while self.status.usbPassthroughMode != expected_state:
-            current_time = time.time()
-            if current_time >= start_time + time_limit_seconds:
-                raise TimeoutError('Setting USB mode timed out after %s '
-                                   'seconds.' % time_limit_seconds)
-            self._set_usb_passthrough_mode(expected_state)
-            time.sleep(1)
-        self._log.info('Monsoon usbPassthroughMode is now "%s"',
-                       state)
-
-        if expected_state in [PassthroughStates.ON]:
-            self._on_reconnect()
-
-    def attach_device(self, android_device):
-        """Deprecated. Use the connection callbacks instead."""
-
-        def on_reconnect():
-            # Make sure the device is connected and available for commands.
-            android_device.wait_for_boot_completion()
-            android_device.start_services()
-            # Release wake lock to put device into sleep.
-            android_device.droid.goToSleepNow()
-            self._log.info('Dut reconnected.')
-
-        def on_disconnect():
-            android_device.stop_services()
-            time.sleep(1)
-
-        self.on_reconnect = on_reconnect
-        self.on_disconnect = on_disconnect
-
-    def set_on_disconnect(self, callback):
-        """Sets the callback to be called when Monsoon disconnects USB."""
-        self.on_disconnect = callback
-
-    def set_on_reconnect(self, callback):
-        """Sets the callback to be called when Monsoon reconnects USB."""
-        self.on_reconnect = callback
-
-    def take_samples(self, assembly_line):
-        """Runs the sampling procedure based on the given assembly line."""
-        # Sampling is always done in a separate process. Release the Monsoon
-        # so the child process can sample from the Monsoon.
-        self.release_monsoon_connection()
-
-        try:
-            assembly_line.run()
-        finally:
-            self.establish_monsoon_connection()
-
-    def measure_power(self,
-                      duration,
-                      measure_after_seconds=0,
-                      hz=5000,
-                      output_path=None,
-                      transformers=None):
-        """Measure power consumption of the attached device.
-
-        This function is a default implementation of measuring power consumption
-        during gathering measurements. For offline methods, use take_samples()
-        with a custom AssemblyLine.
-
-        Args:
-            duration: Amount of time to measure power for. Note:
-                total_duration = duration + measure_after_seconds
-            measure_after_seconds: Number of seconds to wait before beginning
-                reading measurement.
-            hz: The number of samples to collect per second. Must be a factor
-                of 5000.
-            output_path: The location to write the gathered data to.
-            transformers: A list of Transformer objects that receive passed-in
-                          samples. Runs in order sent.
-
-        Returns:
-            A MonsoonData object with the measured power data.
-        """
-        raise NotImplementedError()
-
-    def set_voltage(self, voltage):
-        """Sets the output voltage of monsoon.
-
-        Args:
-            voltage: The voltage to set the output to.
-        """
-        raise NotImplementedError()
-
-    def set_max_current(self, amperes):
-        """Sets monsoon's max output current.
-
-        Args:
-            amperes: The max current in A.
-        """
-        raise NotImplementedError()
-
-    def set_max_initial_current(self, amperes):
-        """Sets the max power-up/initial current.
-
-        Args:
-            amperes: The max initial current allowed in amperes.
-        """
-        raise NotImplementedError()
-
-    @property
-    def status(self):
-        """Gets the status params of monsoon.
-
-        Returns:
-            A dictionary of {status param, value} key-value pairs.
-        """
-        raise NotImplementedError()
-
-    def _on_reconnect(self):
-        """Reconnects the DUT over USB.
-
-        Raises:
-            TimeoutError upon failure to reconnect over USB.
-        """
-        self._log.info('Reconnecting dut.')
-        # Wait for two seconds to ensure that the device is ready, then
-        # attempt to reconnect. If reconnect times out, reset the passthrough
-        # state and try again.
-        time.sleep(2)
-        try:
-            self.on_reconnect()
-        except TimeoutError as err:
-            self._log.info('Toggling USB and trying again. %s' % err)
-            self.usb(PassthroughStates.OFF)
-            time.sleep(1)
-            self.usb(PassthroughStates.ON)
-            self.on_reconnect()
-
-    def _set_usb_passthrough_mode(self, mode):
-        """Makes the underlying Monsoon call to set passthrough mode."""
-        raise NotImplementedError()
-
-    def reconnect_monsoon(self):
-        """Reconnects the Monsoon Serial/USB connection."""
-        raise NotImplementedError()
-
-    def is_allocated(self):
-        """Whether the resource is locked."""
-        raise NotImplementedError()
-
-    def release_monsoon_connection(self):
-        """Releases the underlying monsoon Serial or USB connection.
-
-        Useful for allowing other processes access to the device.
-        """
-        raise NotImplementedError()
-
-    def establish_monsoon_connection(self):
-        """Establishes the underlying monsoon Serial or USB connection."""
-        raise NotImplementedError()
diff --git a/src/antlion/controllers/monsoon_lib/sampling/__init__.py b/src/antlion/controllers/monsoon_lib/sampling/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/controllers/monsoon_lib/sampling/__init__.py
+++ /dev/null
diff --git a/src/antlion/controllers/monsoon_lib/sampling/common.py b/src/antlion/controllers/monsoon_lib/sampling/common.py
deleted file mode 100644
index 7db8baf..0000000
--- a/src/antlion/controllers/monsoon_lib/sampling/common.py
+++ /dev/null
@@ -1,31 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-class UncalibratedSampleChunk(object):
-    """An uncalibrated sample collection stored with its calibration data.
-
-    These objects are created by the SampleChunker Transformer and read by
-    the CalibrationApplier Transformer.
-
-    Attributes:
-        samples: the uncalibrated samples list
-        calibration_data: the data used to calibrate the samples.
-    """
-
-    def __init__(self, samples, calibration_data):
-        self.samples = samples
-        self.calibration_data = calibration_data
diff --git a/src/antlion/controllers/monsoon_lib/sampling/engine/__init__.py b/src/antlion/controllers/monsoon_lib/sampling/engine/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/controllers/monsoon_lib/sampling/engine/__init__.py
+++ /dev/null
diff --git a/src/antlion/controllers/monsoon_lib/sampling/engine/assembly_line.py b/src/antlion/controllers/monsoon_lib/sampling/engine/assembly_line.py
deleted file mode 100644
index 88cc733..0000000
--- a/src/antlion/controllers/monsoon_lib/sampling/engine/assembly_line.py
+++ /dev/null
@@ -1,328 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import queue
-from concurrent.futures import ThreadPoolExecutor
-import multiprocessing
-
-
-class AssemblyLine(object):
-    """A class for passing data through a chain of threads or processes,
-    assembly-line style.
-
-    Attributes:
-        nodes: A list of AssemblyLine.Nodes that pass data from one node to the
-            next.
-    """
-
-    class Node(object):
-        """A Node in an AssemblyLine.
-
-        Each node is composed of the following:
-
-         input_stream                    output_stream
-        ==============> [ transformer ] ===============>
-
-        Attributes:
-            transformer: The Transformer that takes input from the input
-                stream, transforms the data, and sends it to the output stream.
-            input_stream: The stream of data to be taken in as input to this
-                transformer. This stream is the stream to be registered as the
-                previous node's output stream.
-
-        Properties:
-            output_stream: The stream of data to be passed to the next node.
-        """
-
-        def __init__(self, transformer=None, input_stream=None):
-            self.transformer = transformer
-            self.input_stream = input_stream
-
-        @property
-        def output_stream(self):
-            return self.transformer.output_stream
-
-        @output_stream.setter
-        def output_stream(self, value):
-            self.transformer.output_stream = value
-
-    def __init__(self, nodes):
-        """Initializes an AssemblyLine class.
-
-        nodes:
-            A list of AssemblyLine.Node objects.
-        """
-        self.nodes = nodes
-
-    def run(self):
-        """Runs the AssemblyLine, passing the data between each work node."""
-        raise NotImplementedError()
-
-
-class ProcessAssemblyLine(AssemblyLine):
-    """An AssemblyLine that uses processes to schedule work on nodes."""
-
-    def run(self):
-        """Runs the AssemblyLine within a process pool."""
-        if not self.nodes:
-            # If self.nodes is empty, it will create a multiprocessing.Pool of
-            # 0 nodes, which raises a ValueError.
-            return
-
-        process_pool = multiprocessing.Pool(processes=len(self.nodes))
-        for node in self.nodes:
-            process_pool.apply_async(node.transformer.transform,
-                                     [node.input_stream])
-        process_pool.close()
-        process_pool.join()
-
-
-class ThreadAssemblyLine(AssemblyLine):
-    """An AssemblyLine that uses threading to schedule work on nodes."""
-
-    def run(self):
-        """Runs the AssemblyLine within a thread pool."""
-        with ThreadPoolExecutor(max_workers=len(self.nodes)) as thread_pool:
-            for node in self.nodes:
-                thread_pool.submit(node.transformer.transform,
-                                   node.input_stream)
-
-
-class AssemblyLineBuilder(object):
-    """An abstract class that builds an AssemblyLine object.
-
-    Attributes:
-    _assembly_line_generator: The callable that creates the AssemblyLine.
-        Should be in the form of:
-
-            Args:
-                A list of AssemblyLine.Node objects.
-
-            Returns:
-                An AssemblyLine object.
-
-    _queue_generator: The callable that creates new queues to be used for
-        BufferStreams. Should be in the form of:
-
-            Args:
-                None.
-
-            Returns:
-                A Queue object.
-    """
-
-    def __init__(self, queue_generator, assembly_line_generator):
-        """Creates an AssemblyLineBuilder.
-
-        Args:
-            queue_generator: A callable of type lambda: Queue().
-            assembly_line_generator: A callable of type
-                lambda list<AssemblyLine.Node>: AssemblyLine.
-        """
-        super().__init__()
-        self._assembly_line_generator = assembly_line_generator
-        self._queue_generator = queue_generator
-
-        self.nodes = []
-        self._built = False
-
-    @property
-    def built(self):
-        return self._built
-
-    def __generate_queue(self):
-        """Returns a new Queue object for passing information between nodes."""
-        return self._queue_generator()
-
-    @property
-    def queue_generator(self):
-        """Returns the callable used for generating queues."""
-        return self._queue_generator
-
-    def source(self, transformer, input_stream=None):
-        """Adds a SourceTransformer to the AssemblyLine.
-
-        Must be the first function call on the AssemblyLineBuilder.
-
-        Args:
-            transformer: The SourceTransformer that generates data for the
-                AssemblyLine to process.
-            input_stream: The input stream to use, if necessary.
-
-        Raises:
-            ValueError if source is not the first transformer to be added to
-                the AssemblyLine, or the AssemblyLine has been built.
-        """
-        if self.nodes:
-            raise ValueError('AssemblyLines can only have a single source.')
-        if input_stream is None:
-            input_stream = DevNullBufferStream()
-        self.nodes.append(AssemblyLine.Node(transformer, input_stream))
-        return self
-
-    def into(self, transformer):
-        """Adds the given transformer next in the AssemblyLine.
-
-        Args:
-            transformer: The transformer next in the AssemblyLine.
-
-        Raises:
-            ValueError if no source node is set, or the AssemblyLine has been
-                built.
-        """
-        if not self.nodes:
-            raise ValueError('The source transformer must be set first.')
-        if self.built:
-            raise ValueError('Cannot add additional nodes after the '
-                             'AssemblyLine has been built.')
-        stream = BufferStream(self.__generate_queue())
-        self.nodes[-1].transformer.set_output_stream(stream)
-        self.nodes.append(AssemblyLine.Node(transformer, stream))
-        return self
-
-    def build(self, output_stream=None):
-        """Builds the AssemblyLine object.
-
-        Note that after this function is called this AssemblyLineBuilder cannot
-        be used again, as it is already marked as built.
-        """
-        if self.built:
-            raise ValueError('The AssemblyLine is already built.')
-        if not self.nodes:
-            raise ValueError('Cannot create an empty assembly line.')
-        self._built = True
-        if output_stream is None:
-            output_stream = DevNullBufferStream()
-        self.nodes[-1].output_stream = output_stream
-        return self._assembly_line_generator(self.nodes)
-
-
-class ThreadAssemblyLineBuilder(AssemblyLineBuilder):
-    """An AssemblyLineBuilder for generating ThreadAssemblyLines."""
-
-    def __init__(self, queue_generator=queue.Queue):
-        super().__init__(queue_generator, ThreadAssemblyLine)
-
-
-class ProcessAssemblyLineBuilder(AssemblyLineBuilder):
-    """An AssemblyLineBuilder for ProcessAssemblyLines.
-
-    Attributes:
-        manager: The multiprocessing.Manager used for having queues communicate
-            with one another over multiple processes.
-    """
-
-    def __init__(self):
-        self.manager = multiprocessing.Manager()
-        super().__init__(self.manager.Queue, ProcessAssemblyLine)
-
-
-class IndexedBuffer(object):
-    """A buffer indexed with the order it was generated in."""
-
-    def __init__(self, index, size_or_buffer):
-        """Creates an IndexedBuffer.
-
-        Args:
-            index: The integer index associated with the buffer.
-            size_or_buffer:
-                either:
-                    An integer specifying the number of slots in the buffer OR
-                    A list to be used as a buffer.
-        """
-        self.index = index
-        if isinstance(size_or_buffer, int):
-            self.buffer = [None] * size_or_buffer
-        else:
-            self.buffer = size_or_buffer
-
-
-class BufferList(list):
-    """A list of Buffers.
-
-    This type is useful for differentiating when a buffer has been returned
-    from a transformer, vs when a list of buffers has been returned from a
-    transformer.
-    """
-
-
-class BufferStream(object):
-    """An object that acts as a stream between two transformers."""
-
-    # The object passed to the buffer queue to signal the end-of-stream.
-    END = None
-
-    def __init__(self, buffer_queue):
-        """Creates a new BufferStream.
-
-        Args:
-            buffer_queue: A Queue object used to pass data along the
-                BufferStream.
-        """
-        self._buffer_queue = buffer_queue
-
-    def initialize(self):
-        """Initializes the stream.
-
-        When running BufferStreams through multiprocessing, initialize must
-        only be called on the process using the BufferStream.
-        """
-        # Here we need to make any call to the stream to initialize it. This
-        # makes read and write times for the first buffer faster, preventing
-        # the data at the beginning from being dropped.
-        self._buffer_queue.qsize()
-
-    def end_stream(self):
-        """Closes the stream.
-
-        By convention, a None object is used, mirroring file reads returning
-        an empty string when the end of file is reached.
-        """
-        self._buffer_queue.put(None, block=False)
-
-    def add_indexed_buffer(self, buffer):
-        """Adds the given buffer to the buffer stream."""
-        self._buffer_queue.put(buffer, block=False)
-
-    def remove_indexed_buffer(self):
-        """Removes an indexed buffer from the array.
-
-        This operation blocks until data is received.
-
-        Returns:
-            an IndexedBuffer.
-        """
-        return self._buffer_queue.get()
-
-
-class DevNullBufferStream(BufferStream):
-    """A BufferStream that is always empty."""
-
-    def __init__(self, *_):
-        super().__init__(None)
-
-    def initialize(self):
-        """Does nothing. Nothing to initialize."""
-
-    def end_stream(self):
-        """Does nothing. The stream always returns end-of-stream when read."""
-
-    def add_indexed_buffer(self, buffer):
-        """Imitating /dev/null, nothing will be written to the stream."""
-
-    def remove_indexed_buffer(self):
-        """Always returns the end-of-stream marker."""
-        return None
diff --git a/src/antlion/controllers/monsoon_lib/sampling/engine/calibration.py b/src/antlion/controllers/monsoon_lib/sampling/engine/calibration.py
deleted file mode 100644
index 1e531ed..0000000
--- a/src/antlion/controllers/monsoon_lib/sampling/engine/calibration.py
+++ /dev/null
@@ -1,181 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-class CalibrationError(Exception):
-    """Raised when a value is requested before it is properly calibrated."""
-
-
-class CalibrationCollection(object):
-    """The interface for keeping track of calibration values.
-
-    This class is an abstract representation of a collection of Calibration
-    values. Some CalibrationCollections may simply be a dictionary that returns
-    values given to it (see CalibrationScalars). Others may accept multiple
-    values and return the average for a set rolling window (see
-    CalibrationWindow).
-
-    Whichever the implementation, this interface gives end-users a way of
-    setting and querying a collection of calibration data that comes from a
-    Monsoon device.
-    """
-
-    def add(self, channel, origin, granularity, value):
-        """Adds a value to the calibration storage.
-
-        The passed in channel, origin, and granularity arguments will be used
-        as a key to handle and store the value passed in.
-
-        Args:
-            channel: The channel this value comes from. See
-                MonsoonConstants.Channel.
-            origin: The origin type for this value. See MonsoonConstants.Origin.
-            granularity: The granularity type for this value. See
-                MonsoonConstants.Granularity.
-            value: The value to set within the collection.
-        """
-        raise NotImplementedError()
-
-    def get_keys(self):
-        """Returns the list of possible keys for obtaining calibration data.
-
-        Not all possible (Channel, Origin, Granularity) combinations may be
-        available for all CalibrationCollections. It is also not guaranteed the
-        CalibrationCollection's key set is static.
-        """
-        raise NotImplementedError()
-
-    def get(self, channel, origin, granularity):
-        """Returns the calibration value for a given key."""
-        raise NotImplementedError()
-
-
-class CalibrationWindows(CalibrationCollection):
-    """A class that holds calibration data in sliding windows.
-
-    After the window size has been filled, a calibration value is removed every
-    time a new calibration value is added.
-    """
-
-    def __init__(self, calibration_window_size=5):
-        """Creates a collection of CalibrationWindows.
-
-        calibration_window_size: The number of entries in the rolling window to
-            consider for calibration.
-        """
-        super().__init__()
-        self._calibrations = dict()
-        self._calibration_window_size = calibration_window_size
-
-    def add(self, channel, origin, granularity, value):
-        """Adds the given value to the given calibration window.
-
-        Args:
-            channel: The channel being calibrated.
-            origin: The origin value being calibrated.
-            granularity: The granularity level being calibrated.
-            value: The calibration value.
-        """
-        window = self._calibrations[(channel, origin, granularity)]
-        if len(window) == self._calibration_window_size:
-            window.popleft()
-        window.append(value)
-
-    def get_keys(self):
-        return self._calibrations.keys()
-
-    def get(self, channel, origin, granularity):
-        window = self._calibrations[(channel, origin, granularity)]
-        if len(window) < self._calibration_window_size:
-            raise CalibrationError('%s is not calibrated yet.' % repr(
-                (channel, origin, granularity)))
-        return sum(window) / self._calibration_window_size
-
-
-class CalibrationScalars(CalibrationCollection):
-    """A collection of calibrations where scalar values are used.
-
-    Reading scalar calibration values are faster than calculating the
-    calibration value from rolling windows.
-    """
-
-    def __init__(self):
-        self._calibrations = dict()
-
-    def get_keys(self):
-        return self._calibrations.keys()
-
-    def add(self, channel, origin, granularity, value):
-        """Adds a value to the calibration storage.
-
-        Note that if a value is already within the collection, it will be
-        overwritten, since CalibrationScalars can only hold a single value.
-
-        Args:
-            channel: The channel being calibrated.
-            origin: The origin value being calibrated.
-            granularity: The granularity level being calibrated.
-            value: The calibration value.
-        """
-        self._calibrations[(channel, origin, granularity)] = value
-
-    def get(self, channel, origin, granularity):
-        return self._calibrations[(channel, origin, granularity)]
-
-
-class CalibrationSnapshot(CalibrationScalars):
-    """A collection of calibrations taken from another CalibrationCollection.
-
-    CalibrationSnapshot calculates all of the calibration values of another
-    CalibrationCollection and creates a snapshot of those values. This allows
-    the CalibrationWindows to continue getting new values while another thread
-    processes the calibration on previously gathered values.
-    """
-
-    def __init__(self, calibration_collection):
-        """Generates a CalibrationSnapshot from another CalibrationCollection.
-
-        Args:
-            calibration_collection: The CalibrationCollection to create a
-                snapshot of.
-        """
-        super().__init__()
-
-        if not isinstance(calibration_collection, CalibrationCollection):
-            raise ValueError('Argument must inherit from '
-                             'CalibrationCollection.')
-
-        for key in calibration_collection.get_keys():
-            try:
-                # key's type is tuple(Channel, Origin, Granularity)
-                value = calibration_collection.get(*key)
-            except CalibrationError as calibration_error:
-                # If uncalibrated, store the CalibrationError and raise when a
-                # user has asked for the value.
-                value = calibration_error
-            self._calibrations[key] = value
-
-    def get(self, channel, origin, granularity):
-        """Returns the calibration value for the given key.
-
-        Raises:
-            CalibrationError if the requested key is not calibrated.
-        """
-        value = self._calibrations[(channel, origin, granularity)]
-        if isinstance(value, CalibrationError):
-            # The user requested an uncalibrated value. Raise that error.
-            raise value
-        return value
diff --git a/src/antlion/controllers/monsoon_lib/sampling/engine/transformer.py b/src/antlion/controllers/monsoon_lib/sampling/engine/transformer.py
deleted file mode 100644
index 080a69e..0000000
--- a/src/antlion/controllers/monsoon_lib/sampling/engine/transformer.py
+++ /dev/null
@@ -1,221 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-
-from antlion.controllers.monsoon_lib.sampling.engine.assembly_line import BufferList
-from antlion.controllers.monsoon_lib.sampling.engine.assembly_line import BufferStream
-from antlion.controllers.monsoon_lib.sampling.engine.assembly_line import DevNullBufferStream
-from antlion.controllers.monsoon_lib.sampling.engine.assembly_line import IndexedBuffer
-
-
-class Transformer(object):
-    """An object that represents how to transform a given buffer into a result.
-
-    Attributes:
-        output_stream: The stream to output data to upon transformation.
-            Defaults to a DevNullBufferStream.
-    """
-
-    def __init__(self):
-        self.output_stream = DevNullBufferStream(None)
-
-    def set_output_stream(self, output_stream):
-        """Sets the Transformer's output stream to the given output stream."""
-        self.output_stream = output_stream
-
-    def transform(self, input_stream):
-        """Transforms input_stream data and passes it to self.output_stream.
-
-        Args:
-            input_stream: The BufferStream of input data this transformer should
-                transform. Note that the type of data stored within BufferStream
-                is not guaranteed to be in the format expected, much like STDIN
-                is not guaranteed to be the format a process expects. However,
-                for performance, users should expect the data to be properly
-                formatted anyway.
-        """
-        input_stream.initialize()
-        self.output_stream.initialize()
-        class_name = self.__class__.__qualname__
-        try:
-            logging.debug('%s transformer beginning.', class_name)
-            self.on_begin()
-            logging.debug('%s transformation started.', class_name)
-            self._transform(input_stream)
-        except Exception:
-            # TODO(markdr): Get multi-process error reporting to play nicer.
-            logging.exception('%s ran into an exception.', class_name)
-            raise
-        finally:
-            logging.debug('%s transformation ended.', class_name)
-            self.on_end()
-            logging.debug('%s finished.', class_name)
-
-    def _transform_buffer(self, buffer):
-        """Transforms a given buffer.
-
-        The implementation can either:
-
-        1) Return the transformed buffer. Can be either in-place or a new
-           buffer.
-
-        2) Return a BufferList: a list of transformed buffers. This is useful
-           for grouping data together for faster operations.
-
-        Args:
-            buffer: The buffer to transform
-
-        Returns:
-            either a buffer or a BufferList. See detailed documentation.
-        """
-        raise NotImplementedError()
-
-    def _on_end_of_stream(self, input_stream):
-        """To be called when the input stream has sent the end of stream signal.
-
-        This is particularly useful for flushing any stored memory into the
-        output stream.
-
-        Args:
-            input_stream: the stream that was closed.
-        """
-        # By default, this function closes the output stream.
-        self.output_stream.end_stream()
-
-    def _transform(self, input_stream):
-        """Should call _transform_buffer within this function."""
-        raise NotImplementedError()
-
-    def on_begin(self):
-        """A function called before the transform loop begins."""
-
-    def on_end(self):
-        """A function called after the transform loop has ended."""
-
-
-class SourceTransformer(Transformer):
-    """The base class for generating data in an AssemblyLine.
-
-    Note that any Transformer will be able to generate data, but this class is
-    a generic way to send data.
-
-    Attributes:
-        _buffer_size: The buffer size for each IndexedBuffer sent over the
-            output stream.
-    """
-
-    def __init__(self):
-        super().__init__()
-        # Defaulted to 64, which is small enough to be passed within the .6ms
-        # window, but large enough so that it does not spam the queue.
-        self._buffer_size = 64
-
-    def _transform(self, _):
-        """Generates data and sends it to the output stream."""
-        buffer_index = 0
-        while True:
-            indexed_buffer = IndexedBuffer(buffer_index, self._buffer_size)
-            buffer = self._transform_buffer(indexed_buffer.buffer)
-            if buffer is BufferStream.END:
-                break
-            indexed_buffer.buffer = buffer
-            self.output_stream.add_indexed_buffer(indexed_buffer)
-            buffer_index += 1
-
-        self.output_stream.end_stream()
-
-    def _transform_buffer(self, buffer):
-        """Fills the passed-in buffer with data."""
-        raise NotImplementedError()
-
-
-class SequentialTransformer(Transformer):
-    """A transformer that receives input in sequential order.
-
-    Attributes:
-        _next_index: The index of the next IndexedBuffer that should be read.
-    """
-
-    def __init__(self):
-        super().__init__()
-        self._next_index = 0
-
-    def _transform(self, input_stream):
-        while True:
-            indexed_buffer = input_stream.remove_indexed_buffer()
-            if indexed_buffer is BufferStream.END:
-                break
-            buffer_or_buffers = self._transform_buffer(indexed_buffer.buffer)
-            if buffer_or_buffers is not None:
-                self._send_buffers(buffer_or_buffers)
-
-        self._on_end_of_stream(input_stream)
-
-    def _send_buffers(self, buffer_or_buffer_list):
-        """Sends buffers over to the output_stream.
-
-        Args:
-            buffer_or_buffer_list: A BufferList or buffer object. Note that if
-                buffer is None, it is effectively an end-of-stream signal.
-        """
-        if not isinstance(buffer_or_buffer_list, BufferList):
-            # Assume a single buffer was returned
-            buffer_or_buffer_list = BufferList([buffer_or_buffer_list])
-
-        buffer_list = buffer_or_buffer_list
-        for buffer in buffer_list:
-            new_buffer = IndexedBuffer(self._next_index, buffer)
-            self.output_stream.add_indexed_buffer(new_buffer)
-            self._next_index += 1
-
-    def _transform_buffer(self, buffer):
-        raise NotImplementedError()
-
-
-class ParallelTransformer(Transformer):
-    """A Transformer that is capable of running in parallel.
-
-    Buffers received may be unordered. For ordered input, use
-    SequentialTransformer.
-    """
-
-    def _transform(self, input_stream):
-        while True:
-            indexed_buffer = input_stream.remove_indexed_buffer()
-            if indexed_buffer is None:
-                break
-            buffer = self._transform_buffer(indexed_buffer.buffer)
-            indexed_buffer.buffer = buffer
-            self.output_stream.add_indexed_buffer(indexed_buffer)
-
-        self._on_end_of_stream(input_stream)
-
-    def _transform_buffer(self, buffer):
-        """Transforms a given buffer.
-
-        Note that ParallelTransformers can NOT return a BufferList. This is a
-        limitation with the current indexing system. If the input buffer is
-        replaced with multiple buffers, later transformers will not know what
-        the proper order of buffers is.
-
-        Args:
-            buffer: The buffer to transform
-
-        Returns:
-            either None or a buffer. See detailed documentation.
-        """
-        raise NotImplementedError()
diff --git a/src/antlion/controllers/monsoon_lib/sampling/engine/transformers.py b/src/antlion/controllers/monsoon_lib/sampling/engine/transformers.py
deleted file mode 100644
index a9bdee1..0000000
--- a/src/antlion/controllers/monsoon_lib/sampling/engine/transformers.py
+++ /dev/null
@@ -1,246 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import numpy as np
-
-from antlion.controllers.monsoon_lib.sampling.engine.assembly_line import BufferList
-from antlion.controllers.monsoon_lib.sampling.engine.transformer import ParallelTransformer
-from antlion.controllers.monsoon_lib.sampling.engine.transformer import SequentialTransformer
-
-
-class Tee(SequentialTransformer):
-    """Outputs main_current values to the specified file.
-
-    Attributes:
-        _filename: the name of the file to open.
-        _fd: the filestream written to.
-    """
-
-    def __init__(self, filename, measure_after_seconds=0):
-        """Creates an OutputStream.
-
-        Args:
-            filename: the path to the file to write the collected data to.
-            measure_after_seconds: the number of seconds to skip before
-                logging data as part of the measurement.
-        """
-        super().__init__()
-        self._filename = filename
-        self._fd = None
-        self.measure_after_seconds = measure_after_seconds
-        # The time of the first sample gathered.
-        self._start_time = None
-
-    def on_begin(self):
-        self._fd = open(self._filename, 'w+')
-
-    def on_end(self):
-        self._fd.close()
-
-    def _transform_buffer(self, buffer):
-        """Writes the reading values to a file.
-
-        Args:
-            buffer: A list of HvpmReadings.
-        """
-        for sample in buffer:
-            if self._start_time is None:
-                self._start_time = sample.sample_time
-            if (sample.sample_time - self._start_time <
-                    self.measure_after_seconds):
-                continue
-            self._fd.write('%0.9f %.12f\n' %
-                           (sample.sample_time, sample.main_current))
-        self._fd.flush()
-        return BufferList([buffer])
-
-
-class PerfgateTee(SequentialTransformer):
-    """Outputs records of nanoseconds,current,voltage to the specified file.
-
-    Similar to Tee, but this version includes voltage, which may help with
-    accuracy in the power calculations.
-
-    This output type can be enabled by passing this transformer to the
-    transformers kwarg in Monsoon.measure_power():
-
-    # Uses the default Tee
-    > monsoon.measure_power(..., output_path=filename])
-
-    # Uses PerfgateTee
-    > monsoon.measure_power(..., transformers=[PerfgateTee(filename)])
-
-    Attributes:
-        _filename: the name of the file to open.
-        _fd: the filestream written to.
-    """
-
-    def __init__(self, filename, measure_after_seconds=0):
-        """Creates an OutputStream.
-
-        Args:
-            filename: the path to the file to write the collected data to.
-            measure_after_seconds: the number of seconds to skip before logging
-              data as part of the measurement.
-        """
-        super().__init__()
-        self._filename = filename
-        self._fd = None
-        self.measure_after_seconds = measure_after_seconds
-        # The time of the first sample gathered.
-        self._start_time = None
-
-    def on_begin(self):
-        self._fd = open(self._filename, 'w+')
-
-    def on_end(self):
-        self._fd.close()
-
-    def _transform_buffer(self, buffer):
-        """Writes the reading values to a file.
-
-            Args:
-                buffer: A list of HvpmReadings.
-        """
-        for sample in buffer:
-            if self._start_time is None:
-                self._start_time = sample.sample_time
-            if (sample.sample_time - self._start_time <
-                    self.measure_after_seconds):
-                continue
-            self._fd.write(
-                '%i,%.6f,%.6f\n' %
-                (sample.sample_time * 1e9, sample.main_current,
-                 sample.main_voltage))
-        self._fd.flush()
-        return BufferList([buffer])
-
-
-class SampleAggregator(ParallelTransformer):
-    """Aggregates the main current value and the number of samples gathered."""
-
-    def __init__(self, start_after_seconds=0):
-        """Creates a new SampleAggregator.
-
-        Args:
-            start_after_seconds: The number of seconds to wait before gathering
-                data. Useful for allowing the device to settle after USB
-                disconnect.
-        """
-        super().__init__()
-        self._num_samples = 0
-        self._sum_currents = 0
-        self.start_after_seconds = start_after_seconds
-        # The time of the first sample gathered.
-        self._start_time = None
-
-    def _transform_buffer(self, buffer):
-        """Aggregates the sample data.
-
-        Args:
-            buffer: A buffer of H/LvpmReadings.
-        """
-        for sample in buffer:
-            if self._start_time is None:
-                self._start_time = sample.sample_time
-            if sample.sample_time - self._start_time < self.start_after_seconds:
-                continue
-            self._num_samples += 1
-            self._sum_currents += sample.main_current
-        return buffer
-
-    @property
-    def num_samples(self):
-        """The number of samples read from the device."""
-        return self._num_samples
-
-    @property
-    def sum_currents(self):
-        """The total sum of current values gathered so far."""
-        return self._sum_currents
-
-
-class DownSampler(SequentialTransformer):
-    """Takes in sample outputs and returns a downsampled version of that data.
-
-    Note for speed, the downsampling must occur at a perfect integer divisor of
-    the Monsoon's sample rate (5000 hz).
-    """
-    _MONSOON_SAMPLE_RATE = 5000
-
-    def __init__(self, downsample_factor):
-        """Creates a DownSampler Transformer.
-
-        Args:
-            downsample_factor: The number of samples averaged together for a
-                single output sample.
-        """
-        super().__init__()
-
-        self._mean_width = int(downsample_factor)
-        self._leftovers = []
-
-    def _transform_buffer(self, buffer):
-        """Returns the buffer downsampled by an integer factor.
-
-        The algorithm splits data points into three categories:
-
-            tail: The remaining samples where not enough were collected to
-                  reach the integer factor for downsampling. The tail is stored
-                  in self._leftovers between _transform_buffer calls.
-            tailless_buffer: The samples excluding the tail that can be
-                             downsampled directly.
-
-        Below is a diagram explaining the buffer math:
-
-        input:          input buffer n              input buffer n + 1
-                 ╔══════════════════════════╗  ╔══════════════════════════╗
-             ... ║ ╔╗╔╗╔╗╔╗╔╗╔╗╔╗╔╗╔╗╔╗╔╗╔╗ ║  ║ ╔╗╔╗╔╗╔╗╔╗╔╗╔╗╔╗╔╗╔╗╔╗╔╗ ║ ...
-                 ║ ╚╝╚╝╚╝╚╝╚╝╚╝╚╝╚╝╚╝╚╝╚╝╚╝ ║  ║ ╚╝╚╝╚╝╚╝╚╝╚╝╚╝╚╝╚╝╚╝╚╝╚╝ ║
-                 ╚══════════════════════════╝  ╚══════════════════════════╝
-                               ▼                             ▼
-        alg:     ╔═════════════════════╦════╗  ╔═════════════════════╦════╗
-                 ║ ╔╗╔╗╔╗╔╗╔╗╔╗╔╗╔╗╔╗╔╗║╔╗╔╗║  ║ ╔╗╔╗╔╗╔╗╔╗╔╗╔╗╔╗╔╗╔╗║╔╗╔╗║
-                 ║ ╚╝╚╝╚╝╚╝╚╝╚╝╚╝╚╝╚╝╚╝║╚╝╚╝║  ║ ╚╝╚╝╚╝╚╝╚╝╚╝╚╝╚╝╚╝╚╝║╚╝╚╝║
-             ... ║   tailless_buffer   ║tail║  ║   tailless_buffer   ║tail║ ...
-                 ╚═════════════════════╩════╝  ╚═════════════════════╩════╝
-               ──┬───┘ └─┬─┘ ...  └─┬─┘ └────┬─────┘ └─┬─┘ ...  └─┬─┘ └──┬───
-                 ╔╗      ╔╗ ╔╗  ╔╗ ╔╗        ╔╗        ╔╗ ╔╗  ╔╗ ╔╗      ╔╗
-                 ╚╝      ╚╝ ╚╝  ╚╝ ╚╝        ╚╝        ╚╝ ╚╝  ╚╝ ╚╝      ╚╝
-                 └─────────┬────────┘        └──────────┬─────────┘
-                           ▼                            ▼
-        output:   ╔════════════════╗           ╔════════════════╗
-                  ║ ╔╗ ╔╗ ╔╗ ╔╗ ╔╗ ║           ║ ╔╗ ╔╗ ╔╗ ╔╗ ╔╗ ║
-                  ║ ╚╝ ╚╝ ╚╝ ╚╝ ╚╝ ║           ║ ╚╝ ╚╝ ╚╝ ╚╝ ╚╝ ║
-                  ╚════════════════╝           ╚════════════════╝
-                   output buffer n             output buffer n + 1
-        """
-        tail_length = int(
-            (len(buffer) + len(self._leftovers)) % self._mean_width)
-
-        tailless_buffer = np.array(buffer[:len(buffer) - tail_length])
-
-        sample_count = len(tailless_buffer) + len(self._leftovers)
-
-        downsampled_values = np.mean(
-            np.resize(
-                np.append(self._leftovers, tailless_buffer),
-                (sample_count // self._mean_width, self._mean_width)),
-            axis=1)
-
-        self._leftovers = buffer[len(buffer) - tail_length:]
-
-        return downsampled_values
diff --git a/src/antlion/controllers/monsoon_lib/sampling/enums.py b/src/antlion/controllers/monsoon_lib/sampling/enums.py
deleted file mode 100644
index 5fc30c9..0000000
--- a/src/antlion/controllers/monsoon_lib/sampling/enums.py
+++ /dev/null
@@ -1,73 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-class Origin:
-    """The origin types of a given measurement or calibration.
-
-    The Monsoon returns calibration packets for three types of origin:
-
-        ZERO: The calibrated zeroing point.
-        REFERENCE: The reference point used for the returned samples.
-        SCALE: The factor at which to scale the returned samples to get power
-               consumption data.
-    """
-    ZERO = 0
-    REFERENCE = 1
-    SCALE = 2
-
-    values = [ZERO, REFERENCE, SCALE]
-
-
-class Granularity:
-    """The granularity types.
-
-    Monsoon leverages two different granularities when returning power
-    measurements. If the power usage exceeds the threshold of the fine
-    measurement region, a coarse measurement will be used instead.
-
-    This also means that there need to be two calibration values: one for coarse
-    and one for fine.
-    """
-    COARSE = 0
-    FINE = 1
-
-    values = [COARSE, FINE]
-
-
-class Reading:
-    """The extraneous possible reading types.
-
-    Aside from coarse and fine readings (see Granularity), some Monsoons can
-    gather readings on the voltage and gain control.
-    """
-    VOLTAGE = 0x4
-    GAIN = 0x6
-
-    values = [VOLTAGE, GAIN]
-
-
-class Channel:
-    """The possible channel types.
-
-    Monsoons can read power measurements from the following three inputs.
-    Calibration and reading values may also be available on these channels.
-    """
-    MAIN = 0
-    USB = 1
-    AUX = 2
-
-    values = [MAIN, USB, AUX]
diff --git a/src/antlion/controllers/monsoon_lib/sampling/hvpm/__init__.py b/src/antlion/controllers/monsoon_lib/sampling/hvpm/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/controllers/monsoon_lib/sampling/hvpm/__init__.py
+++ /dev/null
diff --git a/src/antlion/controllers/monsoon_lib/sampling/hvpm/calibrations.py b/src/antlion/controllers/monsoon_lib/sampling/hvpm/calibrations.py
deleted file mode 100644
index d07d404..0000000
--- a/src/antlion/controllers/monsoon_lib/sampling/hvpm/calibrations.py
+++ /dev/null
@@ -1,147 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import itertools
-from collections import deque
-
-from antlion.controllers.monsoon_lib.sampling.engine.calibration import CalibrationScalars
-from antlion.controllers.monsoon_lib.sampling.engine.calibration import CalibrationWindows
-from antlion.controllers.monsoon_lib.sampling.enums import Channel
-from antlion.controllers.monsoon_lib.sampling.enums import Granularity
-from antlion.controllers.monsoon_lib.sampling.enums import Origin
-from antlion.controllers.monsoon_lib.sampling.hvpm.packet import SampleType
-
-
-class HvpmCalibrationData(CalibrationWindows):
-    """An object that holds the Dynamic Calibration values for HVPM Sampling."""
-
-    def __init__(self, calibration_window_size=5):
-        super().__init__(calibration_window_size)
-
-        all_variable_sets = [
-            Channel.values,
-            (Origin.REFERENCE, Origin.ZERO),
-            Granularity.values
-        ]  # yapf: disable
-
-        for key in itertools.product(*all_variable_sets):
-            self._calibrations[key] = deque()
-
-    def add_calibration_sample(self, sample):
-        """Adds calibration values from a calibration sample.
-
-        The packet is formatted the following way:
-            [0]: MAIN, COARSE
-            [1]: MAIN, FINE
-            [2]: USB,  COARSE
-            [3]: USB,  FINE
-            [4]: AUX,  COARSE
-            [5]: AUX,  FINE
-            [...]: ?
-            [8]: 0x10 == Origin.ZERO
-                 0x30 == Origin.REFERENCE
-        """
-        sample_type = sample.get_sample_type()
-        if sample_type == SampleType.ZERO_CAL:
-            origin = Origin.ZERO
-        elif sample_type == SampleType.REF_CAL:
-            origin = Origin.REFERENCE
-        else:
-            raise ValueError(
-                'Packet of type %s is not a calibration packet.' % sample_type)
-
-        for i in range(6):
-            # Reads the last bit to get the Granularity value.
-            granularity = i & 0x01
-            # Divides by 2 to get the Channel value.
-            channel = i >> 1
-            self.add(channel, origin, granularity,
-                     sample[channel, granularity])
-
-
-class HvpmCalibrationConstants(CalibrationScalars):
-    """Tracks the calibration values gathered from the Monsoon status packet."""
-
-    def __init__(self, monsoon_status_packet):
-        """Initializes the calibration constants."""
-        super().__init__()
-
-        # Invalid combinations:
-        #   *,   REFERENCE, *
-        #   AUX, ZERO,      *
-        all_variable_sets = [
-            Channel.values,
-            (Origin.SCALE, Origin.ZERO),
-            Granularity.values
-        ]  # yapf: disable
-
-        for key in itertools.product(*all_variable_sets):
-            if key[0] == Channel.AUX and key[1] == Origin.ZERO:
-                # Monsoon status packets do not contain AUX, ZERO readings.
-                # Monsoon defaults these values to 0:
-                self._calibrations[key] = 0
-            else:
-                self._calibrations[key] = getattr(
-                    monsoon_status_packet,
-                    build_status_packet_attribute_name(*key))
-
-
-# TODO(markdr): Potentially find a better home for this function.
-def build_status_packet_attribute_name(channel, origin, granularity):
-    """Creates the status packet attribute name from the given keys.
-
-    The HVPM Monsoon status packet returns values in the following format:
-
-        <channel><Granularity><Origin>
-
-    Note that the following combinations are invalid:
-        <channel><Granularity>Reference
-        aux<Granularity>ZeroOffset
-
-    Args:
-        channel: the Channel value of the attribute
-        origin: the Origin value of the attribute
-        granularity: the Granularity value of the attribute
-
-    Returns:
-        A string that corresponds to the attribute of the Monsoon status packet.
-    """
-    if channel == Channel.MAIN:
-        channel = 'main'
-    elif channel == Channel.USB:
-        channel = 'usb'
-    elif channel == Channel.AUX:
-        channel = 'aux'
-    else:
-        raise ValueError('Unknown channel "%s".' % channel)
-
-    if granularity == Granularity.COARSE:
-        granularity = 'Coarse'
-    elif granularity == Granularity.FINE:
-        granularity = 'Fine'
-    else:
-        raise ValueError('Invalid granularity "%s"' % granularity)
-
-    if origin == Origin.SCALE:
-        origin = 'Scale'
-    elif origin == Origin.ZERO:
-        origin = 'ZeroOffset'
-    else:
-        # Note: Origin.REFERENCE is not valid for monsoon_status_packet
-        # attribute names.
-        raise ValueError('Invalid origin "%s"' % origin)
-
-    return '%s%s%s' % (channel, granularity, origin)
diff --git a/src/antlion/controllers/monsoon_lib/sampling/hvpm/packet.py b/src/antlion/controllers/monsoon_lib/sampling/hvpm/packet.py
deleted file mode 100644
index 8951400..0000000
--- a/src/antlion/controllers/monsoon_lib/sampling/hvpm/packet.py
+++ /dev/null
@@ -1,210 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import struct
-
-from antlion.controllers.monsoon_lib.sampling.enums import Reading
-
-
-class SampleType:
-    """An enum-like class that defines the SampleTypes for LVPM data.
-
-    Note that these values differ from the LVPM values.
-    """
-
-    # A measurement sample.
-    MEASUREMENT = 0x00
-
-    # A zero calibration sample.
-    ZERO_CAL = 0x10
-
-    # A reference calibration sample.
-    REF_CAL = 0x30
-
-    @staticmethod
-    def is_calibration(value):
-        """Returns true iff the SampleType is a type of calibration."""
-        return bool(value & 0x10)
-
-
-class HvpmMeasurement(object):
-    """An object that represents a single measurement from the HVPM device.
-
-    Attributes:
-        _sample_time: The time the sample was taken.
-        values: From the Monsoon API doc, the values are as follows:
-
-    Val │  Byte  │  Type  | Monsoon │ Reading │
-    Pos │ Offset │ Format │ Channel │  Type   │ Description
-    ────┼────────┼────────┼─────────┼─────────┼──────────────────────────────
-     0  │    0   │ uint16 │  Main   │ Coarse  │ Calibration/Measurement value
-     1  │    2   │ uint16 │  Main   │ Fine    │ Calibration/Measurement value
-     2  │    4   │ uint16 │  USB    │ Coarse  │ Calibration/Measurement value
-     3  │    6   │ uint16 │  USB    │ Fine    │ Calibration/Measurement value
-     4  │    8   │ uint16 │  Aux    │ Coarse  │ Calibration/Measurement value
-     5  │   10   │ uint16 │  Aux    │ Fine    │ Calibration/Measurement value
-     6  │   12   │ uint16 │  Main   │ Voltage │ Main V measurement, or Aux V
-        │        │        │         │         │    if setVoltageChannel == 1
-     7  │   14   │ uint16 │  USB    │ Voltage │ USB Voltage
-    ╔══════════════════════════════════════════════════════════════════════╗
-    ║ Note: The Monsoon API Doc puts the below values in the wrong order.  ║
-    ║       The values in this docstring are in the correct order.         ║
-    ╚══════════════════════════════════════════════════════════════════════╝
-     8  │   16   │ uint8? │  USB    │ Gain    │ Measurement gain control.
-        │        │        │         │         │  * Structure Unknown. May be
-        │        │        │         │         │    similar to Main Gain.
-     9  │   17   │ uint8  │  Main   │ Gain    │ Measurement gain control.
-        │        │        │         │         │  * b0-3: Believed to be gain.
-        │        │        │         │         │  * b4-5: SampleType.
-        │        │        │         │         │  * b6-7: Unknown.
-
-    """
-
-    # The total number of bytes in a measurement. See the table above.
-    SIZE = 18
-
-    def __init__(self, raw_data, sample_time):
-        self.values = struct.unpack('>8H2B', raw_data)
-        self._sample_time = sample_time
-
-    def __getitem__(self, channel_and_reading_granularity):
-        """Returns the requested reading for the given channel.
-
-        See HvpmMeasurement.__doc__ for a reference table.
-
-        Args:
-            channel_and_reading_granularity: A tuple of (channel,
-                reading_or_granularity).
-        """
-        channel = channel_and_reading_granularity[0]
-        reading_or_granularity = channel_and_reading_granularity[1]
-
-        data_index = self.get_index(channel, reading_or_granularity)
-
-        if reading_or_granularity == Reading.GAIN:
-            # The format of this value is undocumented by Monsoon Inc.
-            # Assume an unsigned 4-bit integer is used.
-            return self.values[data_index] & 0x0F
-        return self.values[data_index]
-
-    @staticmethod
-    def get_index(channel, reading_or_granularity):
-        """Returns the values array index that corresponds with the given query.
-
-        See HvpmMeasurement.__doc__ for details on how this is determined.
-
-        Args:
-            channel: The channel to read data from.
-            reading_or_granularity: The reading or granularity desired.
-
-        Returns:
-            An index corresponding to the data's location in self.values
-        """
-        if reading_or_granularity == Reading.VOLTAGE:
-            return 6 + channel
-        if reading_or_granularity == Reading.GAIN:
-            return 9 - channel
-        # reading_or_granularity is a granularity value.
-        return channel * 2 + reading_or_granularity
-
-    def get_sample_time(self):
-        """Returns the calculated time for the given sample."""
-        return self._sample_time
-
-    def get_sample_type(self):
-        """Returns a value contained in SampleType."""
-        return self.values[9] & 0x30
-
-
-class Packet(object):
-    """A packet collected directly from serial.read() during sample collection.
-
-    Large amounts of documentation here are pulled directly from
-    http://msoon.github.io/powermonitor/Python_Implementation/docs/API.pdf
-
-    For convenience, here is the table of values stored:
-
-    Offset │ Format │ Field            │ Description
-    ───────┼────────┼──────────────────┼────────────────────────────────────────
-       0   │ uint16 │ dropped_count    │ Number of dropped packets
-       2   │  bits  │ flags            │ Flag values. see self.flags property
-       3   │ uint8  │ num_measurements │ Number of measurements in this packet
-       4   │ byte[] │ measurement[0]   │ Measurement. See HvpmMeasurement class
-      22   │ byte[] │ measurement[1]   │ Optional Measurement. See above
-      44   │ byte[] │ measurement[2]   │ Optional Measurement. See above
-
-    Note that all of values except dropped_count are stored in big-endian
-    format.
-
-    Attributes:
-        _packet_data: The raw data received from the packet.
-        time_of_read: The unix timestamp this packet was collected at.
-        time_since_last_sample: The differential between this packet's
-            time_of_read and the previous packet's.
-    """
-
-    FIRST_MEASUREMENT_OFFSET = 8
-
-    # The maximum size of a packet read from USB.
-    # Note: each HVPM Packet can hold a maximum of 3 measurements.
-    MAX_PACKET_SIZE = FIRST_MEASUREMENT_OFFSET + HvpmMeasurement.SIZE * 3
-
-    def __init__(self, sampled_bytes):
-        self._packet_data = sampled_bytes
-
-        num_data_bytes = (len(sampled_bytes) - Packet.FIRST_MEASUREMENT_OFFSET)
-        self.num_measurements = num_data_bytes // HvpmMeasurement.SIZE
-
-        struct_string = (
-            '<2dhBx' +
-            (str(HvpmMeasurement.SIZE) + 's') * self.num_measurements)
-
-        # yapf: disable. Yapf forces these to try to fit one after the other.
-        (self.time_of_read,
-         self.time_since_last_sample,
-         self.dropped_count,
-         self.flags,
-         *samples) = struct.unpack(struct_string, sampled_bytes)
-        # yapf: enable
-
-        self.measurements = [None] * self.num_measurements
-
-        for i, raw_data in enumerate(samples):
-            self.measurements[i] = HvpmMeasurement(raw_data,
-                                                   self._get_sample_time(i))
-
-    def _get_sample_time(self, index):
-        """Returns the time the sample at the given index was received.
-
-        If multiple samples were captured within the same reading, the samples
-        are assumed to be uniformly distributed during the time it took to
-        sample the values.
-        """
-        time_per_sample = self.time_since_last_sample / self.num_measurements
-        return time_per_sample * (index + 1) + self.time_of_read
-
-    @property
-    def packet_counter(self):
-        """The 4-bit packet index."""
-        return self.flags & 0x0F
-
-    def get_bytes(self):
-        return list(self._packet_data)
-
-    def __getitem__(self, index):
-        return self.measurements[index]
-
-    def __len__(self):
-        return self.num_measurements
diff --git a/src/antlion/controllers/monsoon_lib/sampling/hvpm/transformers.py b/src/antlion/controllers/monsoon_lib/sampling/hvpm/transformers.py
deleted file mode 100644
index 775c309..0000000
--- a/src/antlion/controllers/monsoon_lib/sampling/hvpm/transformers.py
+++ /dev/null
@@ -1,476 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import array
-import logging
-import struct
-import time
-
-import numpy as np
-from Monsoon import HVPM
-
-from antlion.controllers.monsoon_lib.sampling.common import UncalibratedSampleChunk
-from antlion.controllers.monsoon_lib.sampling.engine.assembly_line import BufferList
-from antlion.controllers.monsoon_lib.sampling.engine.assembly_line import ProcessAssemblyLineBuilder
-from antlion.controllers.monsoon_lib.sampling.engine.assembly_line import ThreadAssemblyLineBuilder
-from antlion.controllers.monsoon_lib.sampling.engine.calibration import CalibrationError
-from antlion.controllers.monsoon_lib.sampling.engine.calibration import CalibrationSnapshot
-from antlion.controllers.monsoon_lib.sampling.engine.transformer import ParallelTransformer
-from antlion.controllers.monsoon_lib.sampling.engine.transformer import SequentialTransformer
-from antlion.controllers.monsoon_lib.sampling.engine.transformer import SourceTransformer
-from antlion.controllers.monsoon_lib.sampling.engine.transformer import Transformer
-from antlion.controllers.monsoon_lib.sampling.enums import Channel
-from antlion.controllers.monsoon_lib.sampling.enums import Granularity
-from antlion.controllers.monsoon_lib.sampling.enums import Origin
-from antlion.controllers.monsoon_lib.sampling.enums import Reading
-from antlion.controllers.monsoon_lib.sampling.hvpm.calibrations import HvpmCalibrationConstants
-from antlion.controllers.monsoon_lib.sampling.hvpm.calibrations import HvpmCalibrationData
-from antlion.controllers.monsoon_lib.sampling.hvpm.packet import HvpmMeasurement
-from antlion.controllers.monsoon_lib.sampling.hvpm.packet import Packet
-from antlion.controllers.monsoon_lib.sampling.hvpm.packet import SampleType
-
-
-class HvpmTransformer(Transformer):
-    """Gathers samples from the Monsoon and brings them back to the caller."""
-
-    def __init__(self, monsoon_serial, duration):
-        super().__init__()
-        self.monsoon_serial = monsoon_serial
-        self.duration = duration
-
-    def _transform(self, input_stream):
-        # We need to gather the status packet before sampling so we can use the
-        # static calibration during sample normalization.
-        monsoon = HVPM.Monsoon()
-        monsoon.setup_usb(self.monsoon_serial)
-        monsoon.fillStatusPacket()
-        monsoon_status_packet = monsoon.statusPacket()
-        monsoon.closeDevice()
-
-        # yapf: disable. Yapf doesn't handle fluent interfaces well.
-        (ProcessAssemblyLineBuilder()
-         .source(PacketCollector(self.monsoon_serial, self.duration))
-         .into(SampleNormalizer(monsoon_status_packet=monsoon_status_packet))
-         .build(output_stream=self.output_stream).run())
-        # yapf: enable
-
-
-class PacketCollector(SourceTransformer):
-    """Collects Monsoon packets into a buffer to be sent to another transformer.
-
-    Ideally, the other transformer will be in a separate process to prevent the
-    GIL from slowing down packet collection.
-
-    Attributes:
-        _monsoon_id: The id of the monsoon.
-        _monsoon: The monsoon instance. This is left unset until
-                  _initialize_monsoon() is called.
-    """
-
-    def __init__(self, monsoon_id, sampling_duration=None):
-        super().__init__()
-        self._monsoon_id = monsoon_id
-        self._monsoon = None
-        self.start_time = None
-        self.array = array.array('B', b'\x00' * Packet.MAX_PACKET_SIZE)
-        self.sampling_duration = sampling_duration
-
-    def _initialize_monsoon(self):
-        """Initializes the monsoon object.
-
-        Note that this must be done after the Transformer has started.
-        Otherwise, this transformer will have c-like objects, preventing
-        the transformer from being used with the multiprocess libraries.
-        """
-        self._monsoon = HVPM.Monsoon()
-        self._monsoon.setup_usb(self._monsoon_id)
-        self._monsoon.stopSampling()
-        self._monsoon.fillStatusPacket()
-        self._monsoon.StartSampling()
-
-    def on_begin(self):
-        if __debug__:
-            logging.warning(
-                'Debug mode is enabled. Expect a higher frequency of dropped '
-                'packets. To reduce packet drop, disable your python debugger.'
-            )
-
-        self.start_time = time.time()
-        self._initialize_monsoon()
-
-    def __del__(self):
-        if self._monsoon:
-            self.on_end()
-
-    def on_end(self):
-        self._monsoon.stopSampling()
-        self._monsoon.closeDevice()
-
-    def _transform_buffer(self, buffer):
-        """Fills the buffer with packets until time has been reached.
-
-        Returns:
-            A BufferList of a single buffer if collection is not yet finished.
-            None if sampling is complete.
-        """
-        if (self.sampling_duration
-                and self.sampling_duration < time.time() - self.start_time):
-            return None
-
-        for index in range(len(buffer)):
-            time_before_read = time.time()
-            try:
-                data = self._monsoon.Protocol.DEVICE.read(
-                    # Magic value for USB bulk reads.
-                    0x81,
-                    Packet.MAX_PACKET_SIZE,
-                    # In milliseconds.
-                    timeout=1000)
-            except Exception as e:
-                logging.warning(e)
-                continue
-            time_after_read = time.time()
-            time_data = struct.pack('dd', time_after_read,
-                                    time_after_read - time_before_read)
-            buffer[index] = time_data + data.tobytes()
-
-        return buffer
-
-
-class SampleNormalizer(Transformer):
-    """A Transformer that applies calibration to the input's packets."""
-
-    def __init__(self, monsoon_status_packet):
-        """Creates a SampleNormalizer.
-
-        Args:
-            monsoon_status_packet: The status of the monsoon. Used for gathering
-                the constant calibration data from the device.
-        """
-        super().__init__()
-        self.monsoon_status_packet = monsoon_status_packet
-
-    def _transform(self, input_stream):
-        # yapf: disable. Yapf doesn't handle fluent interfaces well.
-        (ThreadAssemblyLineBuilder()
-         .source(PacketReader(), input_stream=input_stream)
-         .into(SampleChunker())
-         .into(CalibrationApplier(self.monsoon_status_packet))
-         .build(output_stream=self.output_stream).run())
-        # yapf: enable
-
-
-class PacketReader(ParallelTransformer):
-    """Reads raw HVPM Monsoon data and converts it into Packet objects.
-
-    Attributes:
-        rollover_count: The number of times the dropped_count value has rolled
-            over it's maximum value (2^16-1).
-        previous_dropped_count: The dropped count read from the last packet.
-            Used for determining the true number of dropped samples.
-        start_time: The time of the first packet ever read.
-    """
-    """The number of seconds before considering dropped_count to be meaningful.
-
-    Monsoon devices will often report 2^16-1 as the dropped count when first
-    starting the monsoon. This usually goes away within a few milliseconds.
-    """
-    DROP_COUNT_TIMER_THRESHOLD = 1
-
-    def __init__(self):
-        super().__init__()
-        self.rollover_count = 0
-        self.previous_dropped_count = 0
-        self.start_time = 0
-
-    def _transform_buffer(self, buffer):
-        """Reads raw sample data and converts it into packet objects."""
-
-        for i in range(len(buffer)):
-            buffer[i] = Packet(buffer[i])
-
-            if buffer and not self.start_time and i == 0:
-                self.start_time = buffer[0].time_of_read
-
-            if (buffer[i].time_of_read - self.start_time >
-                    PacketReader.DROP_COUNT_TIMER_THRESHOLD):
-                self._process_dropped_count(buffer[i])
-
-        return buffer
-
-    def _process_dropped_count(self, packet):
-        """Processes the dropped count value, updating the internal counters."""
-        if packet.dropped_count == self.previous_dropped_count:
-            return
-
-        if packet.dropped_count < self.previous_dropped_count:
-            self.rollover_count += 1
-
-        self.previous_dropped_count = packet.dropped_count
-        log_function = logging.info if __debug__ else logging.warning
-        log_function('At %9f, total dropped count: %s' %
-                     (packet.time_of_read, self.total_dropped_count))
-
-    @property
-    def total_dropped_count(self):
-        """Returns the total dropped count, accounting for rollovers."""
-        return self.rollover_count * 2**16 + self.previous_dropped_count
-
-    def on_begin(self):
-        if __debug__:
-            logging.info(
-                'The python debugger is enabled. Expect results to '
-                'take longer to process after collection is complete.')
-
-    def on_end(self):
-        if self.previous_dropped_count > 0:
-            if __debug__:
-                logging.info(
-                    'During collection, a total of %d packets were '
-                    'dropped. To reduce this amount, run your test '
-                    'without debug mode enabled.' % self.total_dropped_count)
-            else:
-                logging.warning(
-                    'During collection, a total of %d packets were '
-                    'dropped.' % self.total_dropped_count)
-
-
-class SampleChunker(SequentialTransformer):
-    """Chunks input packets into lists of samples with identical calibration.
-
-    This step helps to quickly apply calibration across many samples at once.
-
-    Attributes:
-        _stored_raw_samples: The queue of raw samples that have yet to be
-            split into a new calibration group.
-        calibration_data: The calibration window information.
-    """
-
-    def __init__(self):
-        super().__init__()
-        self._stored_raw_samples = []
-        self.calibration_data = HvpmCalibrationData()
-
-    def _on_end_of_stream(self, input_stream):
-        self._send_buffers(BufferList([self._cut_new_buffer()]))
-        super()._on_end_of_stream(input_stream)
-
-    def _transform_buffer(self, buffer):
-        """Takes in data from the buffer and splits it based on calibration.
-
-        This transformer is meant to after the PacketReader.
-
-        Args:
-            buffer: A list of Packet objects.
-
-        Returns:
-            A BufferList containing 0 or more UncalibratedSampleChunk objects.
-        """
-        buffer_list = BufferList()
-        for packet in buffer:
-            for sample in packet:
-                sample_type = sample.get_sample_type()
-
-                if sample_type == SampleType.MEASUREMENT:
-                    self._stored_raw_samples.append(sample)
-                elif SampleType.is_calibration(sample_type):
-                    if len(self._stored_raw_samples) > 0:
-                        buffer_list.append(self._cut_new_buffer())
-                    self.calibration_data.add_calibration_sample(sample)
-                else:
-                    # There's no information on what this packet means within
-                    # the documentation or code Monsoon Inc. provides.
-                    logging.warning('Received unidentifiable packet with '
-                                    'SampleType %s: %s' % (sample_type,
-                                                           packet.get_bytes()))
-        return buffer_list
-
-    def _cut_new_buffer(self):
-        """Cuts a new buffer from the input stream data.
-
-        Returns:
-            The newly generated UncalibratedSampleChunk.
-        """
-        calibration_snapshot = CalibrationSnapshot(self.calibration_data)
-        new_chunk = UncalibratedSampleChunk(self._stored_raw_samples,
-                                            calibration_snapshot)
-        # Do not clear the list. Instead, create a new one so the old list can
-        # be owned solely by the UncalibratedSampleChunk.
-        self._stored_raw_samples = []
-        return new_chunk
-
-
-class HvpmReading(object):
-    """The result of fully calibrating a sample. Contains all Monsoon readings.
-
-    Attributes:
-        _reading_list: The list of values obtained from the Monsoon.
-        _time_of_reading: The time since sampling began that the reading was
-            collected at.
-    """
-
-    def __init__(self, reading_list, time_of_reading):
-        """
-        Args:
-            reading_list: A list of reading values in the order of:
-                [0] Main Current
-                [1] USB Current
-                [2] Aux Current
-                [3] Main Voltage
-                [4] USB Voltage
-            time_of_reading: The time the reading was received.
-        """
-        self._reading_list = reading_list
-        self._time_of_reading = time_of_reading
-
-    @property
-    def main_current(self):
-        return self._reading_list[0]
-
-    @property
-    def usb_current(self):
-        return self._reading_list[1]
-
-    @property
-    def aux_current(self):
-        return self._reading_list[2]
-
-    @property
-    def main_voltage(self):
-        return self._reading_list[3]
-
-    @property
-    def usb_voltage(self):
-        return self._reading_list[4]
-
-    @property
-    def sample_time(self):
-        return self._time_of_reading
-
-    def __add__(self, other):
-        return HvpmReading([
-            self.main_current + other.main_current,
-            self.usb_current + other.usb_current,
-            self.aux_current + other.aux_current,
-            self.main_voltage + other.main_voltage,
-            self.usb_voltage + other.usb_voltage,
-        ], self.sample_time + other.sample_time)
-
-    def __truediv__(self, other):
-        return HvpmReading([
-            self.main_current / other,
-            self.usb_current / other,
-            self.aux_current / other,
-            self.main_voltage / other,
-            self.usb_voltage / other,
-        ], self.sample_time / other)
-
-
-class CalibrationApplier(ParallelTransformer):
-    """Applies the calibration formula to the all given samples."""
-
-    def __init__(self, monsoon_status_packet):
-        super().__init__()
-        self.cal_constants = HvpmCalibrationConstants(monsoon_status_packet)
-        monsoon = HVPM.Monsoon()
-        self.fine_threshold = monsoon.fineThreshold
-        self._main_voltage_scale = monsoon.mainvoltageScale
-        self._usb_voltage_scale = monsoon.usbVoltageScale
-        # According to Monsoon.sampleEngine.__ADCRatio, each tick of the ADC
-        # represents this much voltage
-        self._adc_ratio = 6.25e-5
-
-    @staticmethod
-    def _is_device_calibrated(data):
-        """Checks to see if the Monsoon has completed calibration.
-
-        Args:
-            data: the calibration data.
-
-        Returns:
-            True if the data is calibrated. False otherwise.
-        """
-        try:
-            # If the data is calibrated for any Origin.REFERENCE value, it is
-            # calibrated for all Origin.REFERENCE values. The same is true for
-            # Origin.ZERO.
-            data.get(Channel.MAIN, Origin.REFERENCE, Granularity.COARSE)
-            data.get(Channel.MAIN, Origin.ZERO, Granularity.COARSE)
-        except CalibrationError:
-            return False
-        return True
-
-    def _transform_buffer(self, buffer):
-        """Transforms the buffer's information into HvpmReadings.
-
-        Args:
-            buffer: An UncalibratedSampleChunk. This buffer is in-place
-                transformed into a buffer of HvpmReadings.
-        """
-        calibration_data = buffer.calibration_data
-
-        if not self._is_device_calibrated(calibration_data):
-            buffer.samples.clear()
-            return buffer.samples
-
-        readings = np.zeros((len(buffer.samples), 5))
-
-        measurements = np.array([sample.values for sample in buffer.samples])
-        calibrated_value = np.zeros((len(buffer.samples), 2))
-
-        for channel in Channel.values:
-            for granularity in Granularity.values:
-                scale = self.cal_constants.get(channel, Origin.SCALE,
-                                               granularity)
-                zero_offset = self.cal_constants.get(channel, Origin.ZERO,
-                                                     granularity)
-                cal_ref = calibration_data.get(channel, Origin.REFERENCE,
-                                               granularity)
-                cal_zero = calibration_data.get(channel, Origin.ZERO,
-                                                granularity)
-                zero_offset += cal_zero
-                if cal_ref - zero_offset != 0:
-                    slope = scale / (cal_ref - zero_offset)
-                else:
-                    slope = 0
-                if granularity == Granularity.FINE:
-                    slope /= 1000
-
-                index = HvpmMeasurement.get_index(channel, granularity)
-                calibrated_value[:, granularity] = slope * (
-                    measurements[:, index] - zero_offset)
-
-            fine_data_position = HvpmMeasurement.get_index(
-                channel, Granularity.FINE)
-            readings[:, channel] = np.where(
-                measurements[:, fine_data_position] < self.fine_threshold,
-                calibrated_value[:, Granularity.FINE],
-                calibrated_value[:, Granularity.COARSE]) / 1000.0  # to mA
-
-        main_voltage_index = HvpmMeasurement.get_index(Channel.MAIN,
-                                                       Reading.VOLTAGE)
-        usb_voltage_index = HvpmMeasurement.get_index(Channel.USB,
-                                                      Reading.VOLTAGE)
-        readings[:, 3] = (measurements[:, main_voltage_index] * self._adc_ratio
-                          * self._main_voltage_scale)
-        readings[:, 4] = (measurements[:, usb_voltage_index] * self._adc_ratio
-                          * self._usb_voltage_scale)
-
-        for i in range(len(buffer.samples)):
-            buffer.samples[i] = HvpmReading(
-                list(readings[i]), buffer.samples[i].get_sample_time())
-
-        return buffer.samples
diff --git a/src/antlion/controllers/monsoon_lib/sampling/lvpm_stock/__init__.py b/src/antlion/controllers/monsoon_lib/sampling/lvpm_stock/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/controllers/monsoon_lib/sampling/lvpm_stock/__init__.py
+++ /dev/null
diff --git a/src/antlion/controllers/monsoon_lib/sampling/lvpm_stock/calibrations.py b/src/antlion/controllers/monsoon_lib/sampling/lvpm_stock/calibrations.py
deleted file mode 100644
index d9f5fdb..0000000
--- a/src/antlion/controllers/monsoon_lib/sampling/lvpm_stock/calibrations.py
+++ /dev/null
@@ -1,103 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Note: These calibration classes are based on the original reverse-engineered
-algorithm for handling calibration values. As a result, LvpmCalibrationConstants
-does not exist for the LVPM stock sampling algorithm."""
-
-import itertools
-from collections import deque
-
-from antlion.controllers.monsoon_lib.sampling.engine.calibration import CalibrationWindows
-from antlion.controllers.monsoon_lib.sampling.engine.calibration import CalibrationSnapshot
-from antlion.controllers.monsoon_lib.sampling.enums import Channel
-from antlion.controllers.monsoon_lib.sampling.enums import Granularity
-from antlion.controllers.monsoon_lib.sampling.enums import Origin
-from antlion.controllers.monsoon_lib.sampling.lvpm_stock.packet import SampleType
-
-# The numerator used for FINE granularity calibration.
-_FINE_NUMERATOR = .0332
-
-# The numerator used for COARSE granularity calibration
-_COARSE_NUMERATOR = 2.88
-
-
-class LvpmCalibrationData(CalibrationWindows):
-    """An object that holds the Dynamic Calibration values for HVPM Sampling."""
-
-    def __init__(self, calibration_window_size=5):
-        super().__init__(calibration_window_size)
-
-        all_variable_sets = [
-            Channel.values,
-            (Origin.REFERENCE, Origin.ZERO),
-            Granularity.values
-        ]  # yapf: disable
-
-        for key in itertools.product(*all_variable_sets):
-            self._calibrations[key] = deque()
-
-    def add_calibration_sample(self, sample):
-        """Adds calibration values from a calibration sample.
-
-        LVPM Calibration Data is stored as:
-            [0]: Main Current calibration
-            [1]: USB Current calibration
-            [2]: Aux Current calibration
-            [3]: Main Voltage (unknown if this is actually calibration or a
-                               measurement!)
-
-        Note that coarse vs fine is determined by the position within the
-        packet. Even indexes are fine values, odd indexes are coarse values.
-        """
-        sample_type = sample.get_sample_type()
-        if sample_type == SampleType.ZERO_CAL:
-            origin = Origin.ZERO
-        elif sample_type == SampleType.REF_CAL:
-            origin = Origin.REFERENCE
-        else:
-            raise ValueError(
-                'Packet of type %s is not a calibration packet.' % sample_type)
-        granularity = sample.get_calibration_granularity()
-        for channel in Channel.values:
-            self.add(channel, origin, granularity, sample[channel])
-
-
-class LvpmCalibrationSnapshot(CalibrationSnapshot):
-    """A class that holds a snapshot of LVPM Calibration Data.
-
-    According to the original reverse-engineered algorithm for obtaining
-    samples, the LVPM determines scale from the reference and zero calibration
-    values. Here, we calculate those when taking a snapshot."""
-
-    def __init__(self, lvpm_calibration_base):
-        super().__init__(lvpm_calibration_base)
-        pairs = itertools.product(Channel.values, Granularity.values)
-
-        for channel, granularity in pairs:
-            if granularity == Granularity.COARSE:
-                numerator = _COARSE_NUMERATOR
-            else:
-                numerator = _FINE_NUMERATOR
-
-            divisor = (
-                self._calibrations[(channel, Origin.REFERENCE, granularity)] -
-                self._calibrations[(channel, Origin.ZERO, granularity)])
-            # Prevent division by zero.
-            if divisor == 0:
-                divisor = .0001
-
-            self._calibrations[(channel, Origin.SCALE,
-                                granularity)] = (numerator / divisor)
diff --git a/src/antlion/controllers/monsoon_lib/sampling/lvpm_stock/packet.py b/src/antlion/controllers/monsoon_lib/sampling/lvpm_stock/packet.py
deleted file mode 100644
index 75d7af3..0000000
--- a/src/antlion/controllers/monsoon_lib/sampling/lvpm_stock/packet.py
+++ /dev/null
@@ -1,222 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import struct
-
-from antlion.controllers.monsoon_lib.sampling.enums import Reading
-from antlion.controllers.monsoon_lib.sampling.enums import Granularity
-
-
-class SampleType:
-    """An enum-like class that defines the SampleTypes for LVPM data.
-
-    Note that these values differ from the HVPM values.
-    """
-
-    # A measurement sample.
-    MEASUREMENT = 0x00
-
-    # A zero calibration sample.
-    ZERO_CAL = 0x01
-
-    # A reference calibration sample.
-    REF_CAL = 0x02
-
-    @staticmethod
-    def is_calibration(value):
-        """Returns true iff the SampleType is a type of calibration."""
-        return value == SampleType.ZERO_CAL or value == SampleType.REF_CAL
-
-
-class LvpmMeasurement(object):
-    """An object that tracks an individual measurement within the LvpmPacket.
-
-    Attributes:
-        _sample_time: The time the sample was taken.
-        _sample_type: The type of sample stored.
-        values: From reverse engineering, the values are as follows:
-
-
-    If the measurement is a calibration measurement:
-
-    Val │  Byte  │  Type  │ Monsoon │ Reading │
-    Pos │ Offset │ Format │ Channel │  Type   │ Description
-    ────┼────────┼────────┼─────────┼─────────┼──────────────────────────────
-     0  │   0    │  int16 │  Main   │ Current │ Calibration value.
-     1  │   2    │  int16 │  USB    │ Current │ Calibration value.
-     2  │   4    │  int16 │  Aux    │ Current │ Calibration value.
-     3  │   6    │ uint16 │  Main   │ Voltage │ Calibration value.
-
-    If the measurement is a power reading:
-
-    Val │  Byte  │  Type  │ Monsoon │ Reading │
-    Pos │ Offset │ Format │ Channel │  Type   │ Description
-    ────┼────────┼────────┼─────────┼─────────┼──────────────────────────────
-     0  │   0    │  int16 │  Main   │ Current │ b0: if 1, Coarse, else Fine
-        │        │        │         │         │ b1-7: Measurement value.
-     1  │   2    │  int16 │  USB    │ Current │ b0: if 1, Coarse, else Fine
-        │        │        │         │         │ b1-7: Measurement value.
-     2  │   4    │  int16 │  Aux    │ Current │ b0: if 1, Coarse, else Fine
-        │        │        │         │         │ b1-7: Measurement value.
-     3  │   6    │ uint16 │  Main   │ Voltage │ Measurement value.
-
-    """
-
-    # The total number of bytes in a measurement. See the table above.
-    SIZE = 8
-
-    def __init__(self, raw_data, sample_time, sample_type, entry_index):
-        """Creates a new LVPM Measurement.
-
-        Args:
-            raw_data: The raw data format of the LvpmMeasurement.
-            sample_time: The time the sample was recorded.
-            sample_type: The type of sample that was recorded.
-            entry_index: The index of the measurement within the packet.
-        """
-        self.values = struct.unpack('>3hH', raw_data)
-        self._sample_time = sample_time
-        self._sample_type = sample_type
-
-        if SampleType.is_calibration(self._sample_type):
-            # Calibration packets have granularity values determined by whether
-            # or not the entry was odd or even within the returned packet.
-            if entry_index % 2 == 0:
-                self._granularity = Granularity.FINE
-            else:
-                self._granularity = Granularity.COARSE
-        else:
-            # If it is not a calibration packet, each individual reading (main
-            # current, usb current, etc) determines granularity value by
-            # checking the LSB of the measurement value.
-            self._granularity = None
-
-    def __getitem__(self, channel_or_reading):
-        """Returns the requested reading for the given channel.
-
-        Args:
-            channel_or_reading: either a Channel or Reading.Voltage.
-        """
-        if channel_or_reading == Reading.VOLTAGE:
-            return self.values[3]
-        else:
-            # Must be a channel. If it is not, this line will throw an
-            # IndexError, which is what we will want for invalid values.
-            return self.values[channel_or_reading]
-
-    def get_sample_time(self):
-        """Returns the time (since the start time) this sample was collected."""
-        return self._sample_time
-
-    def get_sample_type(self):
-        """Returns a value contained in SampleType."""
-        return self._sample_type
-
-    def get_calibration_granularity(self):
-        """Returns the granularity associated with this packet.
-
-        If the packet is not a calibration packet, None is returned.
-        """
-        return self._granularity
-
-
-class Packet(object):
-    """A packet collected directly from serial.read() during sample collection.
-
-    Note that the true documentation for this has been lost to time. This class
-    and documentation uses knowledge that comes from several reverse-engineering
-    projects. Most of this knowledge comes from
-    http://wiki/Main/MonsoonProtocol.
-
-    The data table looks approximately like this:
-
-    Offset │ Format  │ Field   │ Description
-    ───────┼─────────┼─────────┼────────────────────────────────────────────
-       0   │  uint8  │  flags  │ Bits:
-           │         │    &    │  * b0-3: Sequence number (0-15). Increments
-           │         │   seq   │          each packet
-           │         │         │  * b4: 1 means over-current or thermal kill
-           │         │         │  * b5: Main Output, 1 == unit is at voltage,
-           │         │         │                     0 == output disabled.
-           │         │         │  * b6-7: reserved.
-       1   │  uint8  │ packet  │ The type of the packet:
-           │         │  type   │   * 0: A data packet
-           │         │         │   * 1: A zero calibration packet
-           │         │         │   * 2: A reference calibration packet
-       2   │  uint8  │ unknown │ Always seems to be 0x00
-       3   │  uint8  │ unknown │ Always seems to be 0x00 or 0xC4.
-       4   │ byte[8] │   data  │ See LvpmMeasurement.
-      ...  │ byte[8] │   data  │ Additional LvpmMeasurements.
-      -1   │  uint8  │ unknown │ Last byte, unknown values. Has been seen to
-           │         │         │ usually be \x00, or \x84.
-
-    Attributes:
-        _packet_data: The raw data received from the packet.
-        time_of_read: The unix timestamp this packet was collected at.
-        time_since_last_sample: The difference between this packet's
-            time_of_read and the previous packet's.
-    """
-
-    # The number of bytes before the first packet.
-    FIRST_MEASUREMENT_OFFSET = 4
-
-    def __init__(self, sampled_bytes, time_of_read,
-                 time_since_last_sample):
-        self._packet_data = sampled_bytes
-        self.time_of_read = time_of_read
-        self.time_since_last_sample = time_since_last_sample
-
-        num_data_bytes = len(sampled_bytes) - Packet.FIRST_MEASUREMENT_OFFSET
-        num_packets = num_data_bytes // LvpmMeasurement.SIZE
-
-        sample_struct_format = (str(LvpmMeasurement.SIZE) + 's') * num_packets
-        struct_string = '>2B2x%sx' % sample_struct_format
-
-        self._flag_data, self.packet_type, *samples = struct.unpack(
-            struct_string, sampled_bytes)
-
-        self.measurements = [None] * len(samples)
-
-        for index, raw_measurement in enumerate(samples):
-            self.measurements[index] = LvpmMeasurement(
-                raw_measurement, self._get_sample_time(index),
-                self.packet_type, index)
-
-    def _get_sample_time(self, index):
-        """Returns the time the sample at the given index was received.
-
-        If multiple samples were captured within the same reading, the samples
-        are assumed to be uniformly distributed during the time it took to
-        sample the values.
-
-        Args:
-            index: the index of the individual reading from within the sample.
-        """
-        time_per_sample = self.time_since_last_sample / len(self.measurements)
-        return time_per_sample * (index + 1) + self.time_of_read
-
-    @property
-    def packet_counter(self):
-        return self._flag_data & 0x0F
-
-    def get_bytes(self, start, end_exclusive):
-        """Returns a bytearray spanning from start to the end (exclusive)."""
-        return self._packet_data[start:end_exclusive]
-
-    def __getitem__(self, index):
-        return self.measurements[index]
-
-    def __len__(self):
-        return len(self.measurements)
diff --git a/src/antlion/controllers/monsoon_lib/sampling/lvpm_stock/stock_transformers.py b/src/antlion/controllers/monsoon_lib/sampling/lvpm_stock/stock_transformers.py
deleted file mode 100644
index 39658dd..0000000
--- a/src/antlion/controllers/monsoon_lib/sampling/lvpm_stock/stock_transformers.py
+++ /dev/null
@@ -1,389 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-import struct
-import time
-
-import numpy as np
-
-from antlion.controllers.monsoon_lib.api.lvpm_stock.monsoon_proxy import MonsoonProxy
-from antlion.controllers.monsoon_lib.sampling.common import UncalibratedSampleChunk
-from antlion.controllers.monsoon_lib.sampling.engine.assembly_line import BufferList
-from antlion.controllers.monsoon_lib.sampling.engine.assembly_line import ProcessAssemblyLineBuilder
-from antlion.controllers.monsoon_lib.sampling.engine.assembly_line import ThreadAssemblyLineBuilder
-from antlion.controllers.monsoon_lib.sampling.engine.calibration import CalibrationError
-from antlion.controllers.monsoon_lib.sampling.engine.transformer import ParallelTransformer
-from antlion.controllers.monsoon_lib.sampling.engine.transformer import SequentialTransformer
-from antlion.controllers.monsoon_lib.sampling.engine.transformer import SourceTransformer
-from antlion.controllers.monsoon_lib.sampling.engine.transformer import Transformer
-from antlion.controllers.monsoon_lib.sampling.enums import Channel
-from antlion.controllers.monsoon_lib.sampling.enums import Granularity
-from antlion.controllers.monsoon_lib.sampling.enums import Origin
-from antlion.controllers.monsoon_lib.sampling.lvpm_stock.calibrations import LvpmCalibrationData
-from antlion.controllers.monsoon_lib.sampling.lvpm_stock.calibrations import LvpmCalibrationSnapshot
-from antlion.controllers.monsoon_lib.sampling.lvpm_stock.packet import Packet
-from antlion.controllers.monsoon_lib.sampling.lvpm_stock.packet import SampleType
-
-
-class StockLvpmSampler(Transformer):
-    """Gathers samples from the Monsoon and brings them back to the caller."""
-
-    def __init__(self, monsoon_serial, duration):
-        super().__init__()
-        self.monsoon_serial = monsoon_serial
-        self.duration = duration
-
-    def _transform(self, input_stream):
-        # yapf: disable. Yapf doesn't handle fluent interfaces well.
-        (ProcessAssemblyLineBuilder()
-         .source(PacketCollector(self.monsoon_serial, self.duration))
-         .into(SampleNormalizer())
-         .build(output_stream=self.output_stream)
-         .run())
-        # yapf: enable
-
-
-class PacketCollector(SourceTransformer):
-    """Collects Monsoon packets into a buffer to be sent to another process."""
-
-    def __init__(self, serial=None, sampling_duration=None):
-        super().__init__()
-        self._monsoon_serial = serial
-        self._monsoon_proxy = None
-        self.start_time = 0
-        self.sampling_duration = sampling_duration
-
-    def _initialize_monsoon(self):
-        """Initializes the MonsoonProxy object."""
-        self._monsoon_proxy = MonsoonProxy(serialno=self._monsoon_serial)
-
-    def on_begin(self):
-        """Begins data collection."""
-        self.start_time = time.time()
-        self._initialize_monsoon()
-        self._monsoon_proxy.start_data_collection()
-
-    def on_end(self):
-        """Stops data collection."""
-        self._monsoon_proxy.stop_data_collection()
-        self._monsoon_proxy.ser.close()
-
-    def _transform_buffer(self, buffer):
-        """Fills the given buffer with raw monsoon data at each entry."""
-        if (self.sampling_duration
-                and self.sampling_duration < time.time() - self.start_time):
-            return None
-
-        for index in range(len(buffer)):
-            time_before_read = time.time()
-            data = self._read_packet()
-            if data is None:
-                continue
-            time_after_read = time.time()
-            time_data = struct.pack('dd', time_after_read,
-                                    time_after_read - time_before_read)
-            buffer[index] = time_data + data
-
-        return buffer
-
-    def _read_packet(self):
-        """Reads a single packet from the serial port.
-
-        Packets are sent as Length-Value-Checksum, where the first byte is the
-        length, the following bytes are the value and checksum. The checksum is
-        the stored in the final byte, and is calculated as the 16 least-
-        significant-bits of the sum of all value bytes.
-
-        Returns:
-            None if the read failed. Otherwise, the packet data received.
-        """
-        len_char = self._monsoon_proxy.ser.read(1)
-        if not len_char:
-            logging.warning('Reading from serial timed out.')
-            return None
-
-        data_len = ord(len_char)
-        if not data_len:
-            logging.warning('Unable to read packet length.')
-            return None
-
-        result = self._monsoon_proxy.ser.read(int(data_len))
-        result = bytearray(result)
-        if len(result) != data_len:
-            logging.warning(
-                'Length mismatch, expected %d bytes, got %d bytes.', data_len,
-                len(result))
-            return None
-        body = result[:-1]
-        checksum = sum(body, data_len) & 0xFF
-        if result[-1] != checksum:
-            logging.warning(
-                'Invalid checksum from serial port! Expected %s, '
-                'got %s', hex(checksum), hex(result[-1]))
-            return None
-        return body
-
-
-class SampleNormalizer(Transformer):
-    """Normalizes the raw packet data into reading values."""
-
-    def _transform(self, input_stream):
-        # yapf: disable. Yapf doesn't handle fluent interfaces well.
-        (ThreadAssemblyLineBuilder()
-         .source(PacketReader(), input_stream=input_stream)
-         .into(SampleChunker())
-         .into(CalibrationApplier())
-         .build(output_stream=self.output_stream)
-         .run())
-        # yapf: enable
-
-    def _transform_buffer(self, buffer):
-        """_transform is overloaded, so this function can be left empty."""
-
-
-class PacketReader(ParallelTransformer):
-    """Reads the raw packets and converts them into LVPM Packet objects."""
-
-    def _transform_buffer(self, buffer):
-        """Converts the raw packets to Packet objects in-place in buffer.
-
-        Args:
-            buffer: A list of bytes objects. Will be in-place replaced with
-                Packet objects.
-        """
-        for i, packet in enumerate(buffer):
-            time_bytes_size = struct.calcsize('dd')
-            # Unpacks the two time.time() values sent by PacketCollector.
-            time_of_read, time_since_last_read = struct.unpack(
-                'dd', packet[:time_bytes_size])
-            packet = packet[time_bytes_size:]
-            # Magic number explanation:
-            # LVPM sample packets begin with 4 bytes, have at least one
-            # measurement (8 bytes), and have 1 last byte (usually a \x00 byte).
-            if len(packet) < 4 + 8 + 1 or packet[0] & 0x20 != 0x20:
-                logging.warning(
-                    'Tried to collect power sample values, received data of '
-                    'type=0x%02x, len=%d instead.', packet[0], len(packet))
-                buffer[i] = None
-                continue
-
-            buffer[i] = Packet(packet, time_of_read, time_since_last_read)
-
-        return buffer
-
-
-class SampleChunker(SequentialTransformer):
-    """Chunks input packets into lists of samples with identical calibration.
-
-    This step helps to quickly apply calibration across many samples at once.
-
-    Attributes:
-        _stored_raw_samples: The queue of raw samples that have yet to be
-            split into a new calibration group.
-        calibration_data: The calibration window information.
-    """
-
-    def __init__(self):
-        super().__init__()
-        self._stored_raw_samples = []
-        self.calibration_data = LvpmCalibrationData()
-
-    def _on_end_of_stream(self, input_stream):
-        self._send_buffers(BufferList([self._cut_new_buffer()]))
-        super()._on_end_of_stream(input_stream)
-
-    def _transform_buffer(self, buffer):
-        """Takes in data from the buffer and splits it based on calibration.
-
-        This transformer is meant to after the PacketReader.
-
-        Args:
-            buffer: A list of Packet objects.
-
-        Returns:
-            A BufferList containing 0 or more UncalibratedSampleChunk objects.
-        """
-        buffer_list = BufferList()
-        for packet in buffer:
-            # If a read packet was not a sample, the PacketReader returns None.
-            # Skip over these dud values.
-            if packet is None:
-                continue
-
-            for sample in packet:
-                sample_type = sample.get_sample_type()
-
-                if sample_type == SampleType.MEASUREMENT:
-                    self._stored_raw_samples.append(sample)
-                elif SampleType.is_calibration(sample_type):
-                    if len(self._stored_raw_samples) > 0:
-                        buffer_list.append(self._cut_new_buffer())
-                    self.calibration_data.add_calibration_sample(sample)
-                else:
-                    # There's no information on what this packet means within
-                    # Monsoon documentation or code.
-                    logging.warning('Received unidentifiable packet with '
-                                    'SampleType %s: %s' %
-                                    (sample_type, packet.get_bytes(0, None)))
-        return buffer_list
-
-    def _cut_new_buffer(self):
-        """Cuts a new buffer from the input stream data.
-
-        Returns:
-            The newly generated UncalibratedSampleChunk.
-        """
-        calibration_snapshot = LvpmCalibrationSnapshot(self.calibration_data)
-        new_chunk = UncalibratedSampleChunk(self._stored_raw_samples,
-                                            calibration_snapshot)
-        self._stored_raw_samples = []
-        return new_chunk
-
-
-class LvpmReading(object):
-    """The result of fully calibrating a sample. Contains all Monsoon readings.
-
-    Attributes:
-        _reading_list: The list of values obtained from the Monsoon.
-        _time_of_reading: The time since sampling began that the reading was
-            collected at.
-    """
-
-    def __init__(self, reading_list, time_of_reading):
-        """Creates an LvpmReading.
-
-        Args:
-            reading_list:
-                [0] Main Current
-                [1] USB Current
-                [2] Aux Current
-                [3] Main Voltage
-            time_of_reading: The time the reading was received.
-        """
-        self._reading_list = reading_list
-        self._time_of_reading = time_of_reading
-
-    @property
-    def main_current(self):
-        return self._reading_list[0]
-
-    @property
-    def usb_current(self):
-        return self._reading_list[1]
-
-    @property
-    def aux_current(self):
-        return self._reading_list[2]
-
-    @property
-    def main_voltage(self):
-        return self._reading_list[3]
-
-    @property
-    def sample_time(self):
-        return self._time_of_reading
-
-    def __add__(self, other):
-        reading_list = [
-            self.main_current + other.main_current,
-            self.usb_current + other.usb_current,
-            self.aux_current + other.aux_current,
-            self.main_voltage + other.main_voltage,
-        ]
-        sample_time = self.sample_time + other.sample_time
-
-        return LvpmReading(reading_list, sample_time)
-
-    def __truediv__(self, other):
-        reading_list = [
-            self.main_current / other,
-            self.usb_current / other,
-            self.aux_current / other,
-            self.main_voltage / other,
-        ]
-        sample_time = self.sample_time / other
-
-        return LvpmReading(reading_list, sample_time)
-
-
-class CalibrationApplier(ParallelTransformer):
-    """Applies the calibration formula to the all given samples.
-
-    Designed to come after a SampleChunker Transformer.
-    """
-
-    @staticmethod
-    def _is_device_calibrated(data):
-        """Checks to see if the Monsoon has completed calibration.
-
-        Args:
-            data: the calibration data.
-
-        Returns:
-            True if the data is calibrated. False otherwise.
-        """
-        try:
-            # If the data is calibrated for any Origin.REFERENCE value, it is
-            # calibrated for all Origin.REFERENCE values. The same is true for
-            # Origin.ZERO.
-            data.get(Channel.MAIN, Origin.REFERENCE, Granularity.COARSE)
-            data.get(Channel.MAIN, Origin.ZERO, Granularity.COARSE)
-        except CalibrationError:
-            return False
-        return True
-
-    def _transform_buffer(self, buffer):
-        calibration_data = buffer.calibration_data
-
-        if not self._is_device_calibrated(calibration_data):
-            return []
-
-        measurements = np.array([sample.values for sample in buffer.samples])
-        readings = np.zeros((len(buffer.samples), 5))
-
-        for channel in Channel.values:
-            fine_zero = calibration_data.get(channel, Origin.ZERO,
-                                             Granularity.FINE)
-            fine_scale = calibration_data.get(channel, Origin.SCALE,
-                                              Granularity.FINE)
-            coarse_zero = calibration_data.get(channel, Origin.ZERO,
-                                               Granularity.COARSE)
-            coarse_scale = calibration_data.get(channel, Origin.SCALE,
-                                                Granularity.COARSE)
-
-            # A set LSB means a coarse measurement. This bit needs to be
-            # cleared before setting calibration. Note that the
-            # reverse-engineered algorithm does not rightshift the bits after
-            # this operation. This explains the mismatch of calibration
-            # constants between the reverse-engineered algorithm and the
-            # Monsoon.py algorithm.
-            readings[:, channel] = np.where(
-                measurements[:, channel] & 1,
-                ((measurements[:, channel] & ~1) - coarse_zero) * coarse_scale,
-                (measurements[:, channel] - fine_zero) * fine_scale)
-
-        # The magic number 0.000125 is documented at
-        # http://wiki/Main/MonsoonProtocol#Data_response
-        # It represents how many volts represents each tick in the sample
-        # packet.
-        readings[:, 3] = measurements[:, 3] * 0.000125
-
-        for i in range(len(buffer.samples)):
-            buffer.samples[i] = LvpmReading(
-                list(readings[i]), buffer.samples[i].get_sample_time())
-
-        return buffer.samples
diff --git a/src/antlion/controllers/native.py b/src/antlion/controllers/native.py
deleted file mode 100644
index 8e11c87..0000000
--- a/src/antlion/controllers/native.py
+++ /dev/null
@@ -1,67 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.controllers.sl4a_lib.rpc_connection import RpcConnection
-import json
-import os
-
-HOST = os.environ.get('AP_HOST', None)
-PORT = os.environ.get('AP_PORT', 9999)
-
-
-class SL4NException(Exception):
-    pass
-
-
-class SL4NAPIError(SL4NException):
-    """Raised when remote API reports an error."""
-
-
-class SL4NProtocolError(SL4NException):
-    """Raised when there is an error exchanging data with the device server."""
-    NO_RESPONSE_FROM_HANDSHAKE = "No response from handshake."
-    NO_RESPONSE_FROM_SERVER = "No response from server."
-    MISMATCHED_API_ID = "Mismatched API id."
-
-
-def IDCounter():
-    i = 0
-    while True:
-        yield i
-        i += 1
-
-
-class NativeAndroid(RpcConnection):
-    COUNTER = IDCounter()
-
-    def _rpc(self, method, *args):
-        with self._lock:
-            apiid = next(self._counter)
-        data = {'id': apiid, 'method': method, 'params': args}
-        request = json.dumps(data)
-        self.client.write(request.encode("utf8") + b'\n')
-        self.client.flush()
-        response = self.client.readline()
-        if not response:
-            raise SL4NProtocolError(SL4NProtocolError.NO_RESPONSE_FROM_SERVER)
-        #TODO: (tturney) fix the C side from sending \x00 char over the socket.
-        result = json.loads(
-            str(response, encoding="utf8").rstrip().replace("\x00", ""))
-        if result['error']:
-            raise SL4NAPIError(result['error'])
-        if result['id'] != apiid:
-            raise SL4NProtocolError(SL4NProtocolError.MISMATCHED_API_ID)
-        return result['result']
diff --git a/src/antlion/controllers/native_android_device.py b/src/antlion/controllers/native_android_device.py
deleted file mode 100644
index b898bf6..0000000
--- a/src/antlion/controllers/native_android_device.py
+++ /dev/null
@@ -1,134 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.controllers.android_device import AndroidDevice
-from antlion.controllers.utils_lib import host_utils
-import antlion.controllers.native as native
-from subprocess import call
-
-import logging
-import time
-
-#TODO(tturney): Merge this into android device
-
-MOBLY_CONTROLLER_CONFIG_NAME = "NativeAndroidDevice"
-ACTS_CONTROLLER_REFERENCE_NAME = "native_android_devices"
-
-
-def create(configs):
-    logger = logging
-    ads = get_instances(configs)
-    for ad in ads:
-        try:
-            ad.get_droid()
-        except:
-            logger.exception("Failed to start sl4n on %s" % ad.serial)
-    return ads
-
-
-def destroy(ads):
-    pass
-
-
-def get_instances(serials, ):
-    """Create AndroidDevice instances from a list of serials.
-
-    Args:
-        serials: A list of android device serials.
-        logger: A logger to be passed to each instance.
-
-    Returns:
-        A list of AndroidDevice objects.
-    """
-    results = []
-    for s in serials:
-        results.append(NativeAndroidDevice(s))
-    return results
-
-
-class NativeAndroidDeviceError(Exception):
-    pass
-
-
-class NativeAndroidDevice(AndroidDevice):
-    def __del__(self):
-        if self.h_port:
-            self.adb.forward("--remove tcp:%d" % self.h_port)
-
-    def get_droid(self, handle_event=True):
-        """Create an sl4n connection to the device.
-
-        Return the connection handler 'droid'. By default, another connection
-        on the same session is made for EventDispatcher, and the dispatcher is
-        returned to the caller as well.
-        If sl4n server is not started on the device, try to start it.
-
-        Args:
-            handle_event: True if this droid session will need to handle
-                events.
-
-        Returns:
-            droid: Android object useds to communicate with sl4n on the android
-                device.
-            ed: An optional EventDispatcher to organize events for this droid.
-
-        Examples:
-            Don't need event handling:
-            >>> ad = NativeAndroidDevice()
-            >>> droid = ad.get_droid(False)
-
-            Need event handling:
-            >>> ad = NativeAndroidDevice()
-            >>> droid, ed = ad.get_droid()
-        """
-        if not self.h_port or not host_utils.is_port_available(self.h_port):
-            self.h_port = host_utils.get_available_host_port()
-        self.adb.tcp_forward(self.h_port, self.d_port)
-        pid = self.adb.shell("pidof -s sl4n", ignore_status=True)
-        while (pid):
-            self.adb.shell("kill {}".format(pid))
-            pid = self.adb.shell("pidof -s sl4n", ignore_status=True)
-        call(
-            ["adb -s " + self.serial + " shell sh -c \"/system/bin/sl4n\" &"],
-            shell=True)
-        try:
-            time.sleep(3)
-            droid = self.start_new_session()
-        except:
-            droid = self.start_new_session()
-        return droid
-
-    def start_new_session(self):
-        """Start a new session in sl4n.
-
-        Also caches the droid in a dict with its uid being the key.
-
-        Returns:
-            An Android object used to communicate with sl4n on the android
-                device.
-
-        Raises:
-            sl4nException: Something is wrong with sl4n and it returned an
-            existing uid to a new session.
-        """
-        droid = native.NativeAndroid(port=self.h_port)
-        droid.open()
-        if droid.uid in self._droid_sessions:
-            raise bt.SL4NException(("SL4N returned an existing uid for a "
-                                    "new session. Abort."))
-            return droid
-        self._droid_sessions[droid.uid] = [droid]
-        return droid
diff --git a/src/antlion/controllers/openwrt_ap.py b/src/antlion/controllers/openwrt_ap.py
index ad6e3de..dc99ef2 100644
--- a/src/antlion/controllers/openwrt_ap.py
+++ b/src/antlion/controllers/openwrt_ap.py
@@ -10,14 +10,15 @@
 from antlion.controllers.openwrt_lib import network_settings
 from antlion.controllers.openwrt_lib import wireless_config
 from antlion.controllers.openwrt_lib import wireless_settings_applier
-from antlion.controllers.openwrt_lib.openwrt_constants import OpenWrtModelMap as modelmap
+from antlion.controllers.openwrt_lib.openwrt_constants import (
+    OpenWrtModelMap as modelmap,
+)
 from antlion.controllers.openwrt_lib.openwrt_constants import OpenWrtWifiSetting
 from antlion.controllers.openwrt_lib.openwrt_constants import SYSTEM_INFO_CMD
 from antlion.controllers.utils_lib.ssh import connection
 from antlion.controllers.utils_lib.ssh import settings
 import yaml
 
-
 MOBLY_CONTROLLER_CONFIG_NAME = "OpenWrtAP"
 ACTS_CONTROLLER_REFERENCE_NAME = "access_points"
 OPEN_SECURITY = "none"
@@ -37,616 +38,682 @@
 
 
 def create(configs):
-  """Creates ap controllers from a json config.
+    """Creates ap controllers from a json config.
 
-  Creates an ap controller from either a list, or a single element. The element
-  can either be just the hostname or a dictionary containing the hostname and
-  username of the AP to connect to over SSH.
+    Creates an ap controller from either a list, or a single element. The element
+    can either be just the hostname or a dictionary containing the hostname and
+    username of the AP to connect to over SSH.
 
-  Args:
-    configs: The json configs that represent this controller.
+    Args:
+      configs: The json configs that represent this controller.
 
-  Returns:
-    AccessPoint object
+    Returns:
+      AccessPoint object
 
-  Example:
-    Below is the config file entry for OpenWrtAP as a list. A testbed can have
-    1 or more APs to configure. Each AP has a "ssh_config" key to provide SSH
-    login information. OpenWrtAP#__init__() uses this to create SSH object.
+    Example:
+      Below is the config file entry for OpenWrtAP as a list. A testbed can have
+      1 or more APs to configure. Each AP has a "ssh_config" key to provide SSH
+      login information. OpenWrtAP#__init__() uses this to create SSH object.
 
-      "OpenWrtAP": [
-        {
-          "ssh_config": {
-            "user" : "root",
-            "host" : "192.168.1.1"
+        "OpenWrtAP": [
+          {
+            "ssh_config": {
+              "user" : "root",
+              "host" : "192.168.1.1"
+            }
+          },
+          {
+            "ssh_config": {
+              "user" : "root",
+              "host" : "192.168.1.2"
+            }
           }
-        },
-        {
-          "ssh_config": {
-            "user" : "root",
-            "host" : "192.168.1.2"
-          }
-        }
-      ]
-  """
-  return [OpenWrtAP(c) for c in configs]
+        ]
+    """
+    return [OpenWrtAP(c) for c in configs]
 
 
 def destroy(aps):
-  """Destroys a list of AccessPoints.
+    """Destroys a list of AccessPoints.
 
-  Args:
-    aps: The list of AccessPoints to destroy.
-  """
-  for ap in aps:
-    ap.close()
-    ap.close_ssh()
+    Args:
+      aps: The list of AccessPoints to destroy.
+    """
+    for ap in aps:
+        ap.close()
+        ap.close_ssh()
 
 
 def get_info(aps):
-  """Get information on a list of access points.
+    """Get information on a list of access points.
 
-  Args:
-    aps: A list of AccessPoints.
+    Args:
+      aps: A list of AccessPoints.
 
-  Returns:
-    A list of all aps hostname.
-  """
-  return [ap.ssh_settings.hostname for ap in aps]
+    Returns:
+      A list of all aps hostname.
+    """
+    return [ap.ssh_settings.hostname for ap in aps]
 
 
 class OpenWrtAP(object):
-  """An AccessPoint controller.
+    """An AccessPoint controller.
 
-  Attributes:
-    ssh: The ssh connection to the AP.
-    ssh_settings: The ssh settings being used by the ssh connection.
-    log: Logging object for AccessPoint.
-    wireless_setting: object holding wireless configuration.
-    network_setting: Object for network configuration.
-    model: OpenWrt HW model.
-    radios: Fit interface for test.
-  """
-
-  def __init__(self, config):
-    """Initialize AP."""
-    self.ssh_settings = settings.from_config(config["ssh_config"])
-    self.ssh = connection.SshConnection(self.ssh_settings)
-    self.log = logger.create_logger(
-        lambda msg: "[OpenWrtAP|%s] %s" % (self.ssh_settings.hostname, msg))
-    self.wireless_setting = None
-    self.network_setting = network_settings.NetworkSettings(
-        self.ssh, self.ssh_settings, self.log)
-    self.model = self.get_model_name()
-    if self.model in modelmap.__dict__:
-      self.radios = modelmap.__dict__[self.model]
-    else:
-      self.radios = DEFAULT_RADIOS
-
-  def configure_ap(self, wifi_configs, channel_2g, channel_5g):
-    """Configure AP with the required settings.
-
-    Each test class inherits WifiBaseTest. Based on the test, we may need to
-    configure PSK, WEP, OPEN, ENT networks on 2G and 5G bands in any
-    combination. We call WifiBaseTest methods get_psk_network(),
-    get_open_network(), get_wep_network() and get_ent_network() to create
-    dictionaries which contains this information. 'wifi_configs' is a list of
-    such dictionaries. Example below configures 2 WiFi networks - 1 PSK 2G and
-    1 Open 5G on one AP. configure_ap() is called from WifiBaseTest to
-    configure the APs.
-
-    wifi_configs = [
-      {
-        '2g': {
-          'SSID': '2g_AkqXWPK4',
-          'security': 'psk2',
-          'password': 'YgYuXqDO9H',
-          'hiddenSSID': False
-        },
-      },
-      {
-        '5g': {
-          'SSID': '5g_8IcMR1Sg',
-          'security': 'none',
-          'hiddenSSID': False
-        },
-      }
-    ]
-
-    Args:
-      wifi_configs: list of network settings for 2G and 5G bands.
-      channel_2g: channel for 2G band.
-      channel_5g: channel for 5G band.
-    """
-    # generate wifi configs to configure
-    wireless_configs = self.generate_wireless_configs(wifi_configs)
-    self.wireless_setting = wireless_settings_applier.WirelessSettingsApplier(
-        self.ssh, wireless_configs, channel_2g, channel_5g, self.radios[1], self.radios[0])
-    self.wireless_setting.apply_wireless_settings()
-
-  def start_ap(self):
-    """Starts the AP with the settings in /etc/config/wireless."""
-    self.ssh.run("wifi up")
-    curr_time = time.time()
-    while time.time() < curr_time + WAIT_TIME:
-      if self.get_wifi_status():
-        return
-      time.sleep(3)
-    if not self.get_wifi_status():
-      raise ValueError("Failed to turn on WiFi on the AP.")
-
-  def stop_ap(self):
-    """Stops the AP."""
-    self.ssh.run("wifi down")
-    curr_time = time.time()
-    while time.time() < curr_time + WAIT_TIME:
-      if not self.get_wifi_status():
-        return
-      time.sleep(3)
-    if self.get_wifi_status():
-      raise ValueError("Failed to turn off WiFi on the AP.")
-
-  def get_bssids_for_wifi_networks(self):
-    """Get BSSIDs for wifi networks configured.
-
-    Returns:
-      Dictionary of SSID - BSSID map for both bands.
-    """
-    bssid_map = {"2g": {}, "5g": {}}
-    for radio in self.radios:
-      ssid_ifname_map = self.get_ifnames_for_ssids(radio)
-      if radio == self.radios[0]:
-        for ssid, ifname in ssid_ifname_map.items():
-          bssid_map["5g"][ssid] = self.get_bssid(ifname)
-      elif radio == self.radios[1]:
-        for ssid, ifname in ssid_ifname_map.items():
-          bssid_map["2g"][ssid] = self.get_bssid(ifname)
-    return bssid_map
-
-  def get_ifnames_for_ssids(self, radio):
-    """Get interfaces for wifi networks.
-
-    Args:
-      radio: 2g or 5g radio get the bssids from.
-
-    Returns:
-      dictionary of ssid - ifname mappings.
-    """
-    ssid_ifname_map = {}
-    str_output = self.ssh.run("wifi status %s" % radio).stdout
-    wifi_status = yaml.load(str_output.replace("\t", "").replace("\n", ""),
-                            Loader=yaml.SafeLoader)
-    wifi_status = wifi_status[radio]
-    if wifi_status["up"]:
-      interfaces = wifi_status["interfaces"]
-      for config in interfaces:
-        ssid = config["config"]["ssid"]
-        ifname = config["ifname"]
-        ssid_ifname_map[ssid] = ifname
-    return ssid_ifname_map
-
-  def get_bssid(self, ifname):
-    """Get MAC address from an interface.
-
-    Args:
-      ifname: interface name of the corresponding MAC.
-
-    Returns:
-      BSSID of the interface.
-    """
-    ifconfig = self.ssh.run("ifconfig %s" % ifname).stdout
-    mac_addr = ifconfig.split("\n")[0].split()[-1]
-    return mac_addr
-
-  def set_wpa_encryption(self, encryption):
-    """Set different encryptions to wpa or wpa2.
-
-    Args:
-      encryption: ccmp, tkip, or ccmp+tkip.
-    """
-    str_output = self.ssh.run("wifi status").stdout
-    wifi_status = yaml.load(str_output.replace("\t", "").replace("\n", ""),
-                            Loader=yaml.SafeLoader)
-
-    # Counting how many interface are enabled.
-    total_interface = 0
-    for radio in self.radios:
-      num_interface = len(wifi_status[radio]["interfaces"])
-      total_interface += num_interface
-
-    # Iterates every interface to get and set wpa encryption.
-    default_extra_interface = 2
-    for i in range(total_interface + default_extra_interface):
-      origin_encryption = self.ssh.run(
-          "uci get wireless.@wifi-iface[{}].encryption".format(i)).stdout
-      origin_psk_pattern = re.match(r"psk\b", origin_encryption)
-      target_psk_pattern = re.match(r"psk\b", encryption)
-      origin_psk2_pattern = re.match(r"psk2\b", origin_encryption)
-      target_psk2_pattern = re.match(r"psk2\b", encryption)
-
-      if origin_psk_pattern == target_psk_pattern:
-        self.ssh.run(
-            "uci set wireless.@wifi-iface[{}].encryption={}".format(
-                i, encryption))
-
-      if origin_psk2_pattern == target_psk2_pattern:
-        self.ssh.run(
-            "uci set wireless.@wifi-iface[{}].encryption={}".format(
-                i, encryption))
-
-    self.ssh.run("uci commit wireless")
-    self.ssh.run("wifi")
-
-  def set_password(self, pwd_5g=None, pwd_2g=None):
-    """Set password for individual interface.
-
-    Args:
-        pwd_5g: 8 ~ 63 chars, ascii letters and digits password for 5g network.
-        pwd_2g: 8 ~ 63 chars, ascii letters and digits password for 2g network.
-    """
-    if pwd_5g:
-      if len(pwd_5g) < 8 or len(pwd_5g) > 63:
-        self.log.error("Password must be 8~63 characters long")
-      # Only accept ascii letters and digits
-      elif not re.match("^[A-Za-z0-9]*$", pwd_5g):
-        self.log.error("Password must only contains ascii letters and digits")
-      else:
-        self.ssh.run(
-            "uci set wireless.@wifi-iface[{}].key={}".format(3, pwd_5g))
-        self.log.info("Set 5G password to :{}".format(pwd_5g))
-
-    if pwd_2g:
-      if len(pwd_2g) < 8 or len(pwd_2g) > 63:
-        self.log.error("Password must be 8~63 characters long")
-      # Only accept ascii letters and digits
-      elif not re.match("^[A-Za-z0-9]*$", pwd_2g):
-        self.log.error("Password must only contains ascii letters and digits")
-      else:
-        self.ssh.run(
-            "uci set wireless.@wifi-iface[{}].key={}".format(2, pwd_2g))
-        self.log.info("Set 2G password to :{}".format(pwd_2g))
-
-    self.ssh.run("uci commit wireless")
-    self.ssh.run("wifi")
-
-  def set_ssid(self, ssid_5g=None, ssid_2g=None):
-    """Set SSID for individual interface.
-
-    Args:
-        ssid_5g: 8 ~ 63 chars for 5g network.
-        ssid_2g: 8 ~ 63 chars for 2g network.
-    """
-    if ssid_5g:
-      if len(ssid_5g) < 8 or len(ssid_5g) > 63:
-        self.log.error("SSID must be 8~63 characters long")
-      # Only accept ascii letters and digits
-      else:
-        self.ssh.run(
-            "uci set wireless.@wifi-iface[{}].ssid={}".format(3, ssid_5g))
-        self.log.info("Set 5G SSID to :{}".format(ssid_5g))
-
-    if ssid_2g:
-      if len(ssid_2g) < 8 or len(ssid_2g) > 63:
-        self.log.error("SSID must be 8~63 characters long")
-      # Only accept ascii letters and digits
-      else:
-        self.ssh.run(
-            "uci set wireless.@wifi-iface[{}].ssid={}".format(2, ssid_2g))
-        self.log.info("Set 2G SSID to :{}".format(ssid_2g))
-
-    self.ssh.run("uci commit wireless")
-    self.ssh.run("wifi")
-
-  def generate_mobility_domain(self):
-    """Generate 4-character hexadecimal ID.
-
-    Returns:
-      String; a 4-character hexadecimal ID.
-    """
-    md = "{:04x}".format(random.getrandbits(16))
-    self.log.info("Mobility Domain ID: {}".format(md))
-    return md
-
-  def enable_80211r(self, iface, md):
-    """Enable 802.11r for one single radio.
-
-    Args:
-      iface: index number of wifi-iface.
-              2: radio1
-              3: radio0
-      md: mobility domain. a 4-character hexadecimal ID.
-    Raises:
-      TestSkip if 2g or 5g radio is not up or 802.11r is not enabled.
-    """
-    str_output = self.ssh.run("wifi status").stdout
-    wifi_status = yaml.load(str_output.replace("\t", "").replace("\n", ""),
-                            Loader=yaml.SafeLoader)
-    # Check if the radio is up.
-    if iface == OpenWrtWifiSetting.IFACE_2G:
-      if wifi_status[self.radios[1]]["up"]:
-        self.log.info("2g network is ENABLED")
-      else:
-        raise signals.TestSkip("2g network is NOT ENABLED")
-    elif iface == OpenWrtWifiSetting.IFACE_5G:
-      if wifi_status[self.radios[0]]["up"]:
-        self.log.info("5g network is ENABLED")
-      else:
-        raise signals.TestSkip("5g network is NOT ENABLED")
-
-    # Setup 802.11r.
-    self.ssh.run(
-        "uci set wireless.@wifi-iface[{}].ieee80211r='1'".format(iface))
-    self.ssh.run(
-        "uci set wireless.@wifi-iface[{}].ft_psk_generate_local='1'"
-        .format(iface))
-    self.ssh.run(
-        "uci set wireless.@wifi-iface[{}].mobility_domain='{}'"
-        .format(iface, md))
-    self.ssh.run(
-        "uci commit wireless")
-    self.ssh.run("wifi")
-
-    # Check if 802.11r is enabled.
-    result = self.ssh.run(
-        "uci get wireless.@wifi-iface[{}].ieee80211r".format(iface)).stdout
-    if result == "1":
-      self.log.info("802.11r is ENABLED")
-    else:
-      raise signals.TestSkip("802.11r is NOT ENABLED")
-
-  def generate_wireless_configs(self, wifi_configs):
-    """Generate wireless configs to configure.
-
-    Converts wifi_configs from configure_ap() to a list of 'WirelessConfig'
-    objects. Each object represents a wifi network to configure on the AP.
-
-    Args:
-      wifi_configs: Network list of different security types and bands.
-
-    Returns:
-      wireless configuration for openwrt AP.
-    """
-    num_2g = 1
-    num_5g = 1
-    wireless_configs = []
-
-    for i in range(len(wifi_configs)):
-      if hostapd_constants.BAND_2G in wifi_configs[i]:
-        config = wifi_configs[i][hostapd_constants.BAND_2G]
-        if config["security"] == PSK_SECURITY:
-          wireless_configs.append(
-              wireless_config.WirelessConfig("%s%s" % (WIFI_2G, num_2g),
-                                             config["SSID"],
-                                             config["security"],
-                                             hostapd_constants.BAND_2G,
-                                             password=config["password"],
-                                             hidden=config["hiddenSSID"],
-                                             ieee80211w=config["ieee80211w"]))
-        elif config["security"] == PSK1_SECURITY:
-          wireless_configs.append(
-              wireless_config.WirelessConfig("%s%s" % (WIFI_2G, num_2g),
-                                             config["SSID"],
-                                             config["security"],
-                                             hostapd_constants.BAND_2G,
-                                             password=config["password"],
-                                             hidden=config["hiddenSSID"],
-                                             ieee80211w=config["ieee80211w"]))
-        elif config["security"] == WEP_SECURITY:
-          wireless_configs.append(
-              wireless_config.WirelessConfig("%s%s" % (WIFI_2G, num_2g),
-                                             config["SSID"],
-                                             config["security"],
-                                             hostapd_constants.BAND_2G,
-                                             wep_key=config["wepKeys"][0],
-                                             hidden=config["hiddenSSID"]))
-        elif config["security"] == OPEN_SECURITY:
-          wireless_configs.append(
-              wireless_config.WirelessConfig("%s%s" % (WIFI_2G, num_2g),
-                                             config["SSID"],
-                                             config["security"],
-                                             hostapd_constants.BAND_2G,
-                                             hidden=config["hiddenSSID"]))
-        elif config["security"] == OWE_SECURITY:
-          wireless_configs.append(
-              wireless_config.WirelessConfig("%s%s" % (WIFI_2G, num_2g),
-                                             config["SSID"],
-                                             config["security"],
-                                             hostapd_constants.BAND_2G,
-                                             hidden=config["hiddenSSID"],
-                                             ieee80211w=PMF_ENABLED))
-        elif config["security"] == SAE_SECURITY:
-          wireless_configs.append(
-              wireless_config.WirelessConfig("%s%s" % (WIFI_2G, num_2g),
-                                             config["SSID"],
-                                             config["security"],
-                                             hostapd_constants.BAND_2G,
-                                             password=config["password"],
-                                             hidden=config["hiddenSSID"],
-                                             ieee80211w=PMF_ENABLED))
-        elif config["security"] == SAEMIXED_SECURITY:
-          wireless_configs.append(
-              wireless_config.WirelessConfig("%s%s" % (WIFI_2G, num_2g),
-                                             config["SSID"],
-                                             config["security"],
-                                             hostapd_constants.BAND_2G,
-                                             password=config["password"],
-                                             hidden=config["hiddenSSID"],
-                                             ieee80211w=config["ieee80211w"]))
-        elif config["security"] == ENT_SECURITY:
-          wireless_configs.append(
-              wireless_config.WirelessConfig(
-                  "%s%s" % (WIFI_2G, num_2g),
-                  config["SSID"],
-                  config["security"],
-                  hostapd_constants.BAND_2G,
-                  radius_server_ip=config["radius_server_ip"],
-                  radius_server_port=config["radius_server_port"],
-                  radius_server_secret=config["radius_server_secret"],
-                  hidden=config["hiddenSSID"]))
-        num_2g += 1
-      if hostapd_constants.BAND_5G in wifi_configs[i]:
-        config = wifi_configs[i][hostapd_constants.BAND_5G]
-        if config["security"] == PSK_SECURITY:
-          wireless_configs.append(
-              wireless_config.WirelessConfig("%s%s" % (WIFI_5G, num_5g),
-                                             config["SSID"],
-                                             config["security"],
-                                             hostapd_constants.BAND_5G,
-                                             password=config["password"],
-                                             hidden=config["hiddenSSID"],
-                                             ieee80211w=config["ieee80211w"]))
-        elif config["security"] == PSK1_SECURITY:
-          wireless_configs.append(
-              wireless_config.WirelessConfig("%s%s" % (WIFI_5G, num_5g),
-                                             config["SSID"],
-                                             config["security"],
-                                             hostapd_constants.BAND_5G,
-                                             password=config["password"],
-                                             hidden=config["hiddenSSID"],
-                                             ieee80211w=config["ieee80211w"]))
-        elif config["security"] == WEP_SECURITY:
-          wireless_configs.append(
-              wireless_config.WirelessConfig("%s%s" % (WIFI_5G, num_5g),
-                                             config["SSID"],
-                                             config["security"],
-                                             hostapd_constants.BAND_5G,
-                                             wep_key=config["wepKeys"][0],
-                                             hidden=config["hiddenSSID"]))
-        elif config["security"] == OPEN_SECURITY:
-          wireless_configs.append(
-              wireless_config.WirelessConfig("%s%s" % (WIFI_5G, num_5g),
-                                             config["SSID"],
-                                             config["security"],
-                                             hostapd_constants.BAND_5G,
-                                             hidden=config["hiddenSSID"]))
-        elif config["security"] == OWE_SECURITY:
-          wireless_configs.append(
-              wireless_config.WirelessConfig("%s%s" % (WIFI_5G, num_5g),
-                                             config["SSID"],
-                                             config["security"],
-                                             hostapd_constants.BAND_5G,
-                                             hidden=config["hiddenSSID"],
-                                             ieee80211w=PMF_ENABLED))
-        elif config["security"] == SAE_SECURITY:
-          wireless_configs.append(
-              wireless_config.WirelessConfig("%s%s" % (WIFI_5G, num_5g),
-                                             config["SSID"],
-                                             config["security"],
-                                             hostapd_constants.BAND_5G,
-                                             password=config["password"],
-                                             hidden=config["hiddenSSID"],
-                                             ieee80211w=PMF_ENABLED))
-        elif config["security"] == SAEMIXED_SECURITY:
-          wireless_configs.append(
-              wireless_config.WirelessConfig("%s%s" % (WIFI_5G, num_5g),
-                                             config["SSID"],
-                                             config["security"],
-                                             hostapd_constants.BAND_5G,
-                                             password=config["password"],
-                                             hidden=config["hiddenSSID"],
-                                             ieee80211w=config["ieee80211w"]))
-        elif config["security"] == ENT_SECURITY:
-          wireless_configs.append(
-              wireless_config.WirelessConfig(
-                  "%s%s" % (WIFI_5G, num_5g),
-                  config["SSID"],
-                  config["security"],
-                  hostapd_constants.BAND_5G,
-                  radius_server_ip=config["radius_server_ip"],
-                  radius_server_port=config["radius_server_port"],
-                  radius_server_secret=config["radius_server_secret"],
-                  hidden=config["hiddenSSID"]))
-        num_5g += 1
-
-    return wireless_configs
-
-  def get_wifi_network(self, security=None, band=None):
-    """Return first match wifi interface's config.
-
-    Args:
-      security: psk2 or none
-      band: '2g' or '5g'
-
-    Returns:
-      A dict contains match wifi interface's config.
+    Attributes:
+      ssh: The ssh connection to the AP.
+      ssh_settings: The ssh settings being used by the ssh connection.
+      log: Logging object for AccessPoint.
+      wireless_setting: object holding wireless configuration.
+      network_setting: Object for network configuration.
+      model: OpenWrt HW model.
+      radios: Fit interface for test.
     """
 
-    for wifi_iface in self.wireless_setting.wireless_configs:
-      match_list = []
-      wifi_network = wifi_iface.__dict__
-      if security:
-        match_list.append(security == wifi_network["security"])
-      if band:
-        match_list.append(band == wifi_network["band"])
+    def __init__(self, config):
+        """Initialize AP."""
+        self.ssh_settings = settings.from_config(config["ssh_config"])
+        self.ssh = connection.SshConnection(self.ssh_settings)
+        self.log = logger.create_logger(
+            lambda msg: "[OpenWrtAP|%s] %s" % (self.ssh_settings.hostname, msg)
+        )
+        self.wireless_setting = None
+        self.network_setting = network_settings.NetworkSettings(
+            self.ssh, self.ssh_settings, self.log
+        )
+        self.model = self.get_model_name()
+        if self.model in modelmap.__dict__:
+            self.radios = modelmap.__dict__[self.model]
+        else:
+            self.radios = DEFAULT_RADIOS
 
-      if all(match_list):
-        wifi_network["SSID"] = wifi_network["ssid"]
-        if not wifi_network["password"]:
-          del wifi_network["password"]
-        return wifi_network
-    return None
+    def configure_ap(self, wifi_configs, channel_2g, channel_5g):
+        """Configure AP with the required settings.
 
-  def get_wifi_status(self):
-    """Check if radios are up. Default are 2G and 5G bands.
+        Each test class inherits WifiBaseTest. Based on the test, we may need to
+        configure PSK, WEP, OPEN, ENT networks on 2G and 5G bands in any
+        combination. We call WifiBaseTest methods get_psk_network(),
+        get_open_network(), get_wep_network() and get_ent_network() to create
+        dictionaries which contains this information. 'wifi_configs' is a list of
+        such dictionaries. Example below configures 2 WiFi networks - 1 PSK 2G and
+        1 Open 5G on one AP. configure_ap() is called from WifiBaseTest to
+        configure the APs.
 
-    Returns:
-      True if both radios are up. False if not.
-    """
-    status = True
-    for radio in self.radios:
-      try:
+        wifi_configs = [
+          {
+            '2g': {
+              'SSID': '2g_AkqXWPK4',
+              'security': 'psk2',
+              'password': 'YgYuXqDO9H',
+              'hiddenSSID': False
+            },
+          },
+          {
+            '5g': {
+              'SSID': '5g_8IcMR1Sg',
+              'security': 'none',
+              'hiddenSSID': False
+            },
+          }
+        ]
+
+        Args:
+          wifi_configs: list of network settings for 2G and 5G bands.
+          channel_2g: channel for 2G band.
+          channel_5g: channel for 5G band.
+        """
+        # generate wifi configs to configure
+        wireless_configs = self.generate_wireless_configs(wifi_configs)
+        self.wireless_setting = wireless_settings_applier.WirelessSettingsApplier(
+            self.ssh,
+            wireless_configs,
+            channel_2g,
+            channel_5g,
+            self.radios[1],
+            self.radios[0],
+        )
+        self.wireless_setting.apply_wireless_settings()
+
+    def start_ap(self):
+        """Starts the AP with the settings in /etc/config/wireless."""
+        self.ssh.run("wifi up")
+        curr_time = time.time()
+        while time.time() < curr_time + WAIT_TIME:
+            if self.get_wifi_status():
+                return
+            time.sleep(3)
+        if not self.get_wifi_status():
+            raise ValueError("Failed to turn on WiFi on the AP.")
+
+    def stop_ap(self):
+        """Stops the AP."""
+        self.ssh.run("wifi down")
+        curr_time = time.time()
+        while time.time() < curr_time + WAIT_TIME:
+            if not self.get_wifi_status():
+                return
+            time.sleep(3)
+        if self.get_wifi_status():
+            raise ValueError("Failed to turn off WiFi on the AP.")
+
+    def get_bssids_for_wifi_networks(self):
+        """Get BSSIDs for wifi networks configured.
+
+        Returns:
+          Dictionary of SSID - BSSID map for both bands.
+        """
+        bssid_map = {"2g": {}, "5g": {}}
+        for radio in self.radios:
+            ssid_ifname_map = self.get_ifnames_for_ssids(radio)
+            if radio == self.radios[0]:
+                for ssid, ifname in ssid_ifname_map.items():
+                    bssid_map["5g"][ssid] = self.get_bssid(ifname)
+            elif radio == self.radios[1]:
+                for ssid, ifname in ssid_ifname_map.items():
+                    bssid_map["2g"][ssid] = self.get_bssid(ifname)
+        return bssid_map
+
+    def get_ifnames_for_ssids(self, radio):
+        """Get interfaces for wifi networks.
+
+        Args:
+          radio: 2g or 5g radio get the bssids from.
+
+        Returns:
+          dictionary of ssid - ifname mappings.
+        """
+        ssid_ifname_map = {}
         str_output = self.ssh.run("wifi status %s" % radio).stdout
-        wifi_status = yaml.load(str_output.replace("\t", "").replace("\n", ""),
-                                Loader=yaml.SafeLoader)
-        status = wifi_status[radio]["up"] and status
-      except:
-        self.log.info("Failed to make ssh connection to the OpenWrt")
+        wifi_status = yaml.load(
+            str_output.replace("\t", "").replace("\n", ""), Loader=yaml.SafeLoader
+        )
+        wifi_status = wifi_status[radio]
+        if wifi_status["up"]:
+            interfaces = wifi_status["interfaces"]
+            for config in interfaces:
+                ssid = config["config"]["ssid"]
+                ifname = config["ifname"]
+                ssid_ifname_map[ssid] = ifname
+        return ssid_ifname_map
+
+    def get_bssid(self, ifname):
+        """Get MAC address from an interface.
+
+        Args:
+          ifname: interface name of the corresponding MAC.
+
+        Returns:
+          BSSID of the interface.
+        """
+        ifconfig = self.ssh.run("ifconfig %s" % ifname).stdout
+        mac_addr = ifconfig.split("\n")[0].split()[-1]
+        return mac_addr
+
+    def set_wpa_encryption(self, encryption):
+        """Set different encryptions to wpa or wpa2.
+
+        Args:
+          encryption: ccmp, tkip, or ccmp+tkip.
+        """
+        str_output = self.ssh.run("wifi status").stdout
+        wifi_status = yaml.load(
+            str_output.replace("\t", "").replace("\n", ""), Loader=yaml.SafeLoader
+        )
+
+        # Counting how many interface are enabled.
+        total_interface = 0
+        for radio in self.radios:
+            num_interface = len(wifi_status[radio]["interfaces"])
+            total_interface += num_interface
+
+        # Iterates every interface to get and set wpa encryption.
+        default_extra_interface = 2
+        for i in range(total_interface + default_extra_interface):
+            origin_encryption = self.ssh.run(
+                "uci get wireless.@wifi-iface[{}].encryption".format(i)
+            ).stdout
+            origin_psk_pattern = re.match(r"psk\b", origin_encryption)
+            target_psk_pattern = re.match(r"psk\b", encryption)
+            origin_psk2_pattern = re.match(r"psk2\b", origin_encryption)
+            target_psk2_pattern = re.match(r"psk2\b", encryption)
+
+            if origin_psk_pattern == target_psk_pattern:
+                self.ssh.run(
+                    "uci set wireless.@wifi-iface[{}].encryption={}".format(
+                        i, encryption
+                    )
+                )
+
+            if origin_psk2_pattern == target_psk2_pattern:
+                self.ssh.run(
+                    "uci set wireless.@wifi-iface[{}].encryption={}".format(
+                        i, encryption
+                    )
+                )
+
+        self.ssh.run("uci commit wireless")
+        self.ssh.run("wifi")
+
+    def set_password(self, pwd_5g=None, pwd_2g=None):
+        """Set password for individual interface.
+
+        Args:
+            pwd_5g: 8 ~ 63 chars, ascii letters and digits password for 5g network.
+            pwd_2g: 8 ~ 63 chars, ascii letters and digits password for 2g network.
+        """
+        if pwd_5g:
+            if len(pwd_5g) < 8 or len(pwd_5g) > 63:
+                self.log.error("Password must be 8~63 characters long")
+            # Only accept ascii letters and digits
+            elif not re.match("^[A-Za-z0-9]*$", pwd_5g):
+                self.log.error("Password must only contains ascii letters and digits")
+            else:
+                self.ssh.run(
+                    "uci set wireless.@wifi-iface[{}].key={}".format(3, pwd_5g)
+                )
+                self.log.info("Set 5G password to :{}".format(pwd_5g))
+
+        if pwd_2g:
+            if len(pwd_2g) < 8 or len(pwd_2g) > 63:
+                self.log.error("Password must be 8~63 characters long")
+            # Only accept ascii letters and digits
+            elif not re.match("^[A-Za-z0-9]*$", pwd_2g):
+                self.log.error("Password must only contains ascii letters and digits")
+            else:
+                self.ssh.run(
+                    "uci set wireless.@wifi-iface[{}].key={}".format(2, pwd_2g)
+                )
+                self.log.info("Set 2G password to :{}".format(pwd_2g))
+
+        self.ssh.run("uci commit wireless")
+        self.ssh.run("wifi")
+
+    def set_ssid(self, ssid_5g=None, ssid_2g=None):
+        """Set SSID for individual interface.
+
+        Args:
+            ssid_5g: 8 ~ 63 chars for 5g network.
+            ssid_2g: 8 ~ 63 chars for 2g network.
+        """
+        if ssid_5g:
+            if len(ssid_5g) < 8 or len(ssid_5g) > 63:
+                self.log.error("SSID must be 8~63 characters long")
+            # Only accept ascii letters and digits
+            else:
+                self.ssh.run(
+                    "uci set wireless.@wifi-iface[{}].ssid={}".format(3, ssid_5g)
+                )
+                self.log.info("Set 5G SSID to :{}".format(ssid_5g))
+
+        if ssid_2g:
+            if len(ssid_2g) < 8 or len(ssid_2g) > 63:
+                self.log.error("SSID must be 8~63 characters long")
+            # Only accept ascii letters and digits
+            else:
+                self.ssh.run(
+                    "uci set wireless.@wifi-iface[{}].ssid={}".format(2, ssid_2g)
+                )
+                self.log.info("Set 2G SSID to :{}".format(ssid_2g))
+
+        self.ssh.run("uci commit wireless")
+        self.ssh.run("wifi")
+
+    def generate_mobility_domain(self):
+        """Generate 4-character hexadecimal ID.
+
+        Returns:
+          String; a 4-character hexadecimal ID.
+        """
+        md = "{:04x}".format(random.getrandbits(16))
+        self.log.info("Mobility Domain ID: {}".format(md))
+        return md
+
+    def enable_80211r(self, iface, md):
+        """Enable 802.11r for one single radio.
+
+        Args:
+          iface: index number of wifi-iface.
+                  2: radio1
+                  3: radio0
+          md: mobility domain. a 4-character hexadecimal ID.
+        Raises:
+          TestSkip if 2g or 5g radio is not up or 802.11r is not enabled.
+        """
+        str_output = self.ssh.run("wifi status").stdout
+        wifi_status = yaml.load(
+            str_output.replace("\t", "").replace("\n", ""), Loader=yaml.SafeLoader
+        )
+        # Check if the radio is up.
+        if iface == OpenWrtWifiSetting.IFACE_2G:
+            if wifi_status[self.radios[1]]["up"]:
+                self.log.info("2g network is ENABLED")
+            else:
+                raise signals.TestSkip("2g network is NOT ENABLED")
+        elif iface == OpenWrtWifiSetting.IFACE_5G:
+            if wifi_status[self.radios[0]]["up"]:
+                self.log.info("5g network is ENABLED")
+            else:
+                raise signals.TestSkip("5g network is NOT ENABLED")
+
+        # Setup 802.11r.
+        self.ssh.run("uci set wireless.@wifi-iface[{}].ieee80211r='1'".format(iface))
+        self.ssh.run(
+            "uci set wireless.@wifi-iface[{}].ft_psk_generate_local='1'".format(iface)
+        )
+        self.ssh.run(
+            "uci set wireless.@wifi-iface[{}].mobility_domain='{}'".format(iface, md)
+        )
+        self.ssh.run("uci commit wireless")
+        self.ssh.run("wifi")
+
+        # Check if 802.11r is enabled.
+        result = self.ssh.run(
+            "uci get wireless.@wifi-iface[{}].ieee80211r".format(iface)
+        ).stdout
+        if result == "1":
+            self.log.info("802.11r is ENABLED")
+        else:
+            raise signals.TestSkip("802.11r is NOT ENABLED")
+
+    def generate_wireless_configs(self, wifi_configs):
+        """Generate wireless configs to configure.
+
+        Converts wifi_configs from configure_ap() to a list of 'WirelessConfig'
+        objects. Each object represents a wifi network to configure on the AP.
+
+        Args:
+          wifi_configs: Network list of different security types and bands.
+
+        Returns:
+          wireless configuration for openwrt AP.
+        """
+        num_2g = 1
+        num_5g = 1
+        wireless_configs = []
+
+        for i in range(len(wifi_configs)):
+            if hostapd_constants.BAND_2G in wifi_configs[i]:
+                config = wifi_configs[i][hostapd_constants.BAND_2G]
+                if config["security"] == PSK_SECURITY:
+                    wireless_configs.append(
+                        wireless_config.WirelessConfig(
+                            "%s%s" % (WIFI_2G, num_2g),
+                            config["SSID"],
+                            config["security"],
+                            hostapd_constants.BAND_2G,
+                            password=config["password"],
+                            hidden=config["hiddenSSID"],
+                            ieee80211w=config["ieee80211w"],
+                        )
+                    )
+                elif config["security"] == PSK1_SECURITY:
+                    wireless_configs.append(
+                        wireless_config.WirelessConfig(
+                            "%s%s" % (WIFI_2G, num_2g),
+                            config["SSID"],
+                            config["security"],
+                            hostapd_constants.BAND_2G,
+                            password=config["password"],
+                            hidden=config["hiddenSSID"],
+                            ieee80211w=config["ieee80211w"],
+                        )
+                    )
+                elif config["security"] == WEP_SECURITY:
+                    wireless_configs.append(
+                        wireless_config.WirelessConfig(
+                            "%s%s" % (WIFI_2G, num_2g),
+                            config["SSID"],
+                            config["security"],
+                            hostapd_constants.BAND_2G,
+                            wep_key=config["wepKeys"][0],
+                            hidden=config["hiddenSSID"],
+                        )
+                    )
+                elif config["security"] == OPEN_SECURITY:
+                    wireless_configs.append(
+                        wireless_config.WirelessConfig(
+                            "%s%s" % (WIFI_2G, num_2g),
+                            config["SSID"],
+                            config["security"],
+                            hostapd_constants.BAND_2G,
+                            hidden=config["hiddenSSID"],
+                        )
+                    )
+                elif config["security"] == OWE_SECURITY:
+                    wireless_configs.append(
+                        wireless_config.WirelessConfig(
+                            "%s%s" % (WIFI_2G, num_2g),
+                            config["SSID"],
+                            config["security"],
+                            hostapd_constants.BAND_2G,
+                            hidden=config["hiddenSSID"],
+                            ieee80211w=PMF_ENABLED,
+                        )
+                    )
+                elif config["security"] == SAE_SECURITY:
+                    wireless_configs.append(
+                        wireless_config.WirelessConfig(
+                            "%s%s" % (WIFI_2G, num_2g),
+                            config["SSID"],
+                            config["security"],
+                            hostapd_constants.BAND_2G,
+                            password=config["password"],
+                            hidden=config["hiddenSSID"],
+                            ieee80211w=PMF_ENABLED,
+                        )
+                    )
+                elif config["security"] == SAEMIXED_SECURITY:
+                    wireless_configs.append(
+                        wireless_config.WirelessConfig(
+                            "%s%s" % (WIFI_2G, num_2g),
+                            config["SSID"],
+                            config["security"],
+                            hostapd_constants.BAND_2G,
+                            password=config["password"],
+                            hidden=config["hiddenSSID"],
+                            ieee80211w=config["ieee80211w"],
+                        )
+                    )
+                elif config["security"] == ENT_SECURITY:
+                    wireless_configs.append(
+                        wireless_config.WirelessConfig(
+                            "%s%s" % (WIFI_2G, num_2g),
+                            config["SSID"],
+                            config["security"],
+                            hostapd_constants.BAND_2G,
+                            radius_server_ip=config["radius_server_ip"],
+                            radius_server_port=config["radius_server_port"],
+                            radius_server_secret=config["radius_server_secret"],
+                            hidden=config["hiddenSSID"],
+                        )
+                    )
+                num_2g += 1
+            if hostapd_constants.BAND_5G in wifi_configs[i]:
+                config = wifi_configs[i][hostapd_constants.BAND_5G]
+                if config["security"] == PSK_SECURITY:
+                    wireless_configs.append(
+                        wireless_config.WirelessConfig(
+                            "%s%s" % (WIFI_5G, num_5g),
+                            config["SSID"],
+                            config["security"],
+                            hostapd_constants.BAND_5G,
+                            password=config["password"],
+                            hidden=config["hiddenSSID"],
+                            ieee80211w=config["ieee80211w"],
+                        )
+                    )
+                elif config["security"] == PSK1_SECURITY:
+                    wireless_configs.append(
+                        wireless_config.WirelessConfig(
+                            "%s%s" % (WIFI_5G, num_5g),
+                            config["SSID"],
+                            config["security"],
+                            hostapd_constants.BAND_5G,
+                            password=config["password"],
+                            hidden=config["hiddenSSID"],
+                            ieee80211w=config["ieee80211w"],
+                        )
+                    )
+                elif config["security"] == WEP_SECURITY:
+                    wireless_configs.append(
+                        wireless_config.WirelessConfig(
+                            "%s%s" % (WIFI_5G, num_5g),
+                            config["SSID"],
+                            config["security"],
+                            hostapd_constants.BAND_5G,
+                            wep_key=config["wepKeys"][0],
+                            hidden=config["hiddenSSID"],
+                        )
+                    )
+                elif config["security"] == OPEN_SECURITY:
+                    wireless_configs.append(
+                        wireless_config.WirelessConfig(
+                            "%s%s" % (WIFI_5G, num_5g),
+                            config["SSID"],
+                            config["security"],
+                            hostapd_constants.BAND_5G,
+                            hidden=config["hiddenSSID"],
+                        )
+                    )
+                elif config["security"] == OWE_SECURITY:
+                    wireless_configs.append(
+                        wireless_config.WirelessConfig(
+                            "%s%s" % (WIFI_5G, num_5g),
+                            config["SSID"],
+                            config["security"],
+                            hostapd_constants.BAND_5G,
+                            hidden=config["hiddenSSID"],
+                            ieee80211w=PMF_ENABLED,
+                        )
+                    )
+                elif config["security"] == SAE_SECURITY:
+                    wireless_configs.append(
+                        wireless_config.WirelessConfig(
+                            "%s%s" % (WIFI_5G, num_5g),
+                            config["SSID"],
+                            config["security"],
+                            hostapd_constants.BAND_5G,
+                            password=config["password"],
+                            hidden=config["hiddenSSID"],
+                            ieee80211w=PMF_ENABLED,
+                        )
+                    )
+                elif config["security"] == SAEMIXED_SECURITY:
+                    wireless_configs.append(
+                        wireless_config.WirelessConfig(
+                            "%s%s" % (WIFI_5G, num_5g),
+                            config["SSID"],
+                            config["security"],
+                            hostapd_constants.BAND_5G,
+                            password=config["password"],
+                            hidden=config["hiddenSSID"],
+                            ieee80211w=config["ieee80211w"],
+                        )
+                    )
+                elif config["security"] == ENT_SECURITY:
+                    wireless_configs.append(
+                        wireless_config.WirelessConfig(
+                            "%s%s" % (WIFI_5G, num_5g),
+                            config["SSID"],
+                            config["security"],
+                            hostapd_constants.BAND_5G,
+                            radius_server_ip=config["radius_server_ip"],
+                            radius_server_port=config["radius_server_port"],
+                            radius_server_secret=config["radius_server_secret"],
+                            hidden=config["hiddenSSID"],
+                        )
+                    )
+                num_5g += 1
+
+        return wireless_configs
+
+    def get_wifi_network(self, security=None, band=None):
+        """Return first match wifi interface's config.
+
+        Args:
+          security: psk2 or none
+          band: '2g' or '5g'
+
+        Returns:
+          A dict contains match wifi interface's config.
+        """
+
+        for wifi_iface in self.wireless_setting.wireless_configs:
+            match_list = []
+            wifi_network = wifi_iface.__dict__
+            if security:
+                match_list.append(security == wifi_network["security"])
+            if band:
+                match_list.append(band == wifi_network["band"])
+
+            if all(match_list):
+                wifi_network["SSID"] = wifi_network["ssid"]
+                if not wifi_network["password"]:
+                    del wifi_network["password"]
+                return wifi_network
+        return None
+
+    def get_wifi_status(self):
+        """Check if radios are up. Default are 2G and 5G bands.
+
+        Returns:
+          True if both radios are up. False if not.
+        """
+        status = True
+        for radio in self.radios:
+            try:
+                str_output = self.ssh.run("wifi status %s" % radio).stdout
+                wifi_status = yaml.load(
+                    str_output.replace("\t", "").replace("\n", ""),
+                    Loader=yaml.SafeLoader,
+                )
+                status = wifi_status[radio]["up"] and status
+            except:
+                self.log.info("Failed to make ssh connection to the OpenWrt")
+                return False
+        return status
+
+    def verify_wifi_status(self, timeout=20):
+        """Ensure wifi interfaces are ready.
+
+        Args:
+          timeout: An integer that is the number of times to try
+                   wait for interface ready.
+        Returns:
+          True if both radios are up. False if not.
+        """
+        start_time = time.time()
+        end_time = start_time + timeout
+        while time.time() < end_time:
+            if self.get_wifi_status():
+                return True
+            time.sleep(1)
         return False
-    return status
 
-  def verify_wifi_status(self, timeout=20):
-    """Ensure wifi interfaces are ready.
+    def get_model_name(self):
+        """Get Openwrt model name.
 
-    Args:
-      timeout: An integer that is the number of times to try
-               wait for interface ready.
-    Returns:
-      True if both radios are up. False if not.
-    """
-    start_time = time.time()
-    end_time = start_time + timeout
-    while time.time() < end_time:
-      if self.get_wifi_status():
-        return True
-      time.sleep(1)
-    return False
+        Returns:
+          A string include device brand and model. e.g. NETGEAR_R8000
+        """
+        out = self.ssh.run(SYSTEM_INFO_CMD).stdout.split("\n")
+        for line in out:
+            if "board_name" in line:
+                model = line.split()[1].strip('",').split(",")
+                return "_".join(map(lambda i: i.upper(), model))
+        self.log.info("Failed to retrieve OpenWrt model information.")
+        return None
 
-  def get_model_name(self):
-    """Get Openwrt model name.
    def close(self):
        """Reset wireless and network settings to default and stop AP."""
        # Network settings are only restored when some were actually
        # applied (the recorded config set is non-empty); wireless
        # settings are restored whenever a wireless setting object exists.
        if self.network_setting.config:
            self.network_setting.cleanup_network_settings()
        if self.wireless_setting:
            self.wireless_setting.cleanup_wireless_settings()
 
-    Returns:
-      A string include device brand and model. e.g. NETGEAR_R8000
-    """
-    out = self.ssh.run(SYSTEM_INFO_CMD).stdout.split("\n")
-    for line in out:
-      if "board_name" in line:
-        model = (line.split()[1].strip("\",").split(","))
-        return "_".join(map(lambda i: i.upper(), model))
-    self.log.info("Failed to retrieve OpenWrt model information.")
-    return None
    def close_ssh(self):
        """Close SSH connection to AP.

        This only tears down the control connection; it does not restore
        any wireless or network settings (use close() for that).
        """
        self.ssh.close()
 
-  def close(self):
-    """Reset wireless and network settings to default and stop AP."""
-    if self.network_setting.config:
-      self.network_setting.cleanup_network_settings()
-    if self.wireless_setting:
-      self.wireless_setting.cleanup_wireless_settings()
-
-  def close_ssh(self):
-    """Close SSH connection to AP."""
-    self.ssh.close()
-
-  def reboot(self):
-    """Reboot Openwrt."""
-    self.ssh.run("reboot")
-
    def reboot(self):
        """Reboot Openwrt.

        Issues the `reboot` command over SSH; the SSH connection is
        expected to drop once the device goes down.
        """
        self.ssh.run("reboot")
diff --git a/src/antlion/controllers/openwrt_lib/network_const.py b/src/antlion/controllers/openwrt_lib/network_const.py
index 3aba0de..3b05b83 100644
--- a/src/antlion/controllers/openwrt_lib/network_const.py
+++ b/src/antlion/controllers/openwrt_lib/network_const.py
@@ -4,13 +4,10 @@
 IPSEC_CONF = {
     "config setup": {
         "charondebug": "chd 2,ike 2,knl 2,net 2,esp 2,dmn 2,"
-                       "mgr 2,lib 1,cfg 2,enc 1".__repr__(),
-        "uniqueids": "never"
+        "mgr 2,lib 1,cfg 2,enc 1".__repr__(),
+        "uniqueids": "never",
     },
-    "conn %default": {
-        "ike": "aes128-sha-modp1024",
-        "esp": "aes128-sha1"
-    }
+    "conn %default": {"ike": "aes128-sha-modp1024", "esp": "aes128-sha1"},
 }
 
 IPSEC_L2TP_PSK = {
@@ -24,7 +21,7 @@
         "rightprotoport": "17/%any",
         "rightsubnet": "0.0.0.0/0",
         "rightauth": "psk",
-        "auto": "add"
+        "auto": "add",
     }
 }
 
@@ -40,7 +37,7 @@
         "rightprotoport": "17/%any",
         "rightsubnet": "0.0.0.0/0",
         "rightauth": "pubkey",
-        "auto": "add"
+        "auto": "add",
     }
 }
 
@@ -102,7 +99,7 @@
         "right": "%any",
         "rightid": "vpntest",
         "rightauth": "eap-mschapv2",
-        "auto": "add"
+        "auto": "add",
     }
 }
 
@@ -116,7 +113,7 @@
         "right": "%any",
         "rightid": "vpntest",
         "rightauth": "psk",
-        "auto": "add"
+        "auto": "add",
     }
 }
 
@@ -133,7 +130,7 @@
         "rightid": "vpntest@%s" % LOCALHOST,
         "rightauth": "pubkey",
         "rightcert": "clientCert.pem",
-        "auto": "add"
+        "auto": "add",
     }
 }
 
@@ -149,7 +146,7 @@
         "right": "%any",
         "rightid": "vpntest",
         "rightauth": "eap-mschapv2",
-        "auto": "add"
+        "auto": "add",
     }
 }
 
@@ -163,7 +160,7 @@
         "right": "%any",
         "rightid": "vpntest",
         "rightauth": "psk",
-        "auto": "add"
+        "auto": "add",
     }
 }
 
@@ -180,7 +177,7 @@
         "rightid": "vpntest@strongswan-vpn-server.android-iperf.com",
         "rightauth": "pubkey",
         "rightcert": "clientCert.pem",
-        "auto": "add"
+        "auto": "add",
     }
 }
 
@@ -228,14 +225,14 @@
     "proxyarp",
     "lcp-echo-interval 30",
     "lcp-echo-failure 4",
-    "nomppe"
+    "nomppe",
 )
 
# iptables rules for vpn_pptp: accept traffic on any PPP tunnel interface
# (the "ppp+" wildcard) in the input, output, and forwarding chains.
FIREWALL_RULES_FOR_PPTP = (
    "iptables -A input_rule -i ppp+ -j ACCEPT",
    "iptables -A output_rule -o ppp+ -j ACCEPT",
    "iptables -A forwarding_rule -i ppp+ -j ACCEPT",
)
 
 # iptable rules for vpn_l2tp
@@ -251,7 +248,7 @@
     "iptables -A INPUT -i eth0.2 -p udp --dport 4500 -j ACCEPT",
     "iptables -A INPUT -p udp --dport 500 -j ACCEPT",
     "iptables -A INPUT -p udp --dport 4500 -j ACCEPT",
-    "iptables -A INPUT -p udp -m policy --dir in --pol ipsec -m udp --dport 1701 -j ACCEPT"
+    "iptables -A INPUT -p udp -m policy --dir in --pol ipsec -m udp --dport 1701 -j ACCEPT",
 )
 
 FIREWALL_RULES_DISABLE_DNS_RESPONSE = (
@@ -275,13 +272,15 @@
         name: vpn server name for register in OpenWrt
     """
 
-    def __init__(self,
-                 vpn_server_hostname,
-                 vpn_server_address,
-                 vpn_username,
-                 vpn_password,
-                 psk_secret,
-                 server_name):
+    def __init__(
+        self,
+        vpn_server_hostname,
+        vpn_server_address,
+        vpn_username,
+        vpn_password,
+        psk_secret,
+        server_name,
+    ):
         self.name = server_name
         self.hostname = vpn_server_hostname
         self.address = vpn_server_address
diff --git a/src/antlion/controllers/openwrt_lib/network_settings.py b/src/antlion/controllers/openwrt_lib/network_settings.py
index 10ed891..5d14360 100644
--- a/src/antlion/controllers/openwrt_lib/network_settings.py
+++ b/src/antlion/controllers/openwrt_lib/network_settings.py
@@ -19,7 +19,6 @@
 from antlion import utils
 from antlion.controllers.openwrt_lib import network_const
 
-
 SERVICE_DNSMASQ = "dnsmasq"
 SERVICE_STUNNEL = "stunnel"
 SERVICE_NETWORK = "network"
@@ -92,7 +91,7 @@
             "block_dns_response": self.unblock_dns_response,
             "setup_mdns": self.remove_mdns,
             "add_dhcp_rapid_commit": self.remove_dhcp_rapid_commit,
-            "setup_captive_portal": self.remove_cpative_portal
+            "setup_captive_portal": self.remove_cpative_portal,
         }
         # This map contains cleanup functions to restore the configuration to
         # its default state. We write these keys to HISTORY_CONFIG_PATH prior to
@@ -130,8 +129,7 @@
         """Apply changes on Access point."""
         self.ssh.run("uci commit")
         self.service_manager.restart_services()
-        self.create_config_file("\n".join(self.config),
-                                HISTORY_CONFIG_PATH)
+        self.create_config_file("\n".join(self.config), HISTORY_CONFIG_PATH)
 
     def package_install(self, package_list):
         """Install packages on OpenWrtAP via opkg If not installed.
@@ -143,8 +141,10 @@
         self.ssh.run("opkg update")
         for package_name in package_list.split(" "):
             if not self._package_installed(package_name):
-                self.ssh.run("opkg install %s" % package_name,
-                             timeout=DEFAULT_PACKAGE_INSTALL_TIMEOUT)
+                self.ssh.run(
+                    "opkg install %s" % package_name,
+                    timeout=DEFAULT_PACKAGE_INSTALL_TIMEOUT,
+                )
                 self.log.info("Package: %s installed." % package_name)
             else:
                 self.log.info("Package: %s skipped (already installed)." % package_name)
@@ -185,8 +185,9 @@
             True if Existed.
         """
         path, file_name = abs_file_path.rsplit("/", 1)
-        if self.ssh.run("ls %s | grep %s" % (path, file_name),
-                        ignore_status=True).stdout:
+        if self.ssh.run(
+            "ls %s | grep %s" % (path, file_name), ignore_status=True
+        ).stdout:
             return True
         return False
 
@@ -211,7 +212,7 @@
         if not self.path_exists(abs_path):
             self.ssh.run("mkdir %s" % abs_path)
         else:
-            self.log.info("%s already existed." %abs_path)
+            self.log.info("%s already existed." % abs_path)
 
     def count(self, config, key):
         """Count in uci config.
@@ -222,8 +223,9 @@
         Returns:
             Numbers of the count.
         """
-        count = self.ssh.run("uci show %s | grep =%s" % (config, key),
-                             ignore_status=True).stdout
+        count = self.ssh.run(
+            "uci show %s | grep =%s" % (config, key), ignore_status=True
+        ).stdout
         return len(count.split("\n"))
 
     def create_config_file(self, config, file_path):
@@ -233,7 +235,7 @@
             config: A string of content of config.
             file_path: Config's abs_path.
         """
-        self.ssh.run("echo -e \"%s\" > %s" % (config, file_path))
+        self.ssh.run('echo -e "%s" > %s' % (config, file_path))
 
     def replace_config_option(self, old_option, new_option, file_path):
         """Replace config option if pattern match.
@@ -324,8 +326,7 @@
 
     def clear_resource_record(self):
         """Delete the all resource record."""
-        rr = self.ssh.run("uci show dhcp | grep =domain",
-                          ignore_status=True).stdout
+        rr = self.ssh.run("uci show dhcp | grep =domain", ignore_status=True).stdout
         if rr:
             for _ in rr.split("\n"):
                 self.del_resource_record()
@@ -408,27 +409,28 @@
         self.ssh.run("uci set pptpd.@login[0].password='%s'" % password)
         self.service_manager.need_restart(SERVICE_PPTPD)
 
-        self.replace_config_option(r"#*ms-dns \d+.\d+.\d+.\d+",
-                                   "ms-dns %s" % ms_dns, PPTPD_OPTION_PATH)
-        self.replace_config_option("(#no)*proxyarp",
-                                   "proxyarp", PPTPD_OPTION_PATH)
+        self.replace_config_option(
+            r"#*ms-dns \d+.\d+.\d+.\d+", "ms-dns %s" % ms_dns, PPTPD_OPTION_PATH
+        )
+        self.replace_config_option("(#no)*proxyarp", "proxyarp", PPTPD_OPTION_PATH)
 
     def restore_pptpd(self):
         """Disable pptpd."""
         self.ssh.run("uci set pptpd.pptpd.enabled=0")
-        self.remove_config_option(r"\S+ pptp-server \S+ \*",
-                                  PPP_CHAP_SECRET_PATH)
+        self.remove_config_option(r"\S+ pptp-server \S+ \*", PPP_CHAP_SECRET_PATH)
         self.service_manager.need_restart(SERVICE_PPTPD)
 
-    def setup_vpn_l2tp_server(self,
-                              vpn_server_hostname,
-                              vpn_server_address,
-                              vpn_username,
-                              vpn_password,
-                              psk_secret,
-                              server_name,
-                              country,
-                              org):
+    def setup_vpn_l2tp_server(
+        self,
+        vpn_server_hostname,
+        vpn_server_address,
+        vpn_username,
+        vpn_password,
+        psk_secret,
+        server_name,
+        country,
+        org,
+    ):
         """Setup l2tp vpn server on OpenWrt.
 
         Args:
@@ -441,12 +443,14 @@
             country: country code for generate cert keys.
             org: Organization name for generate cert keys.
         """
-        self.l2tp = network_const.VpnL2tp(vpn_server_hostname,
-                                          vpn_server_address,
-                                          vpn_username,
-                                          vpn_password,
-                                          psk_secret,
-                                          server_name)
+        self.l2tp = network_const.VpnL2tp(
+            vpn_server_hostname,
+            vpn_server_address,
+            vpn_username,
+            vpn_password,
+            psk_secret,
+            server_name,
+        )
 
         self.package_install(L2TP_PACKAGE)
         self.config.add("setup_vpn_l2tp_server")
@@ -499,19 +503,23 @@
             "       include strongswan.d/charon/*.conf",
             "   }",
             "   dns1=%s" % dns,
-            "}"
+            "}",
         ]
         self.create_config_file("\n".join(config), "/etc/strongswan.conf")
 
     def setup_ipsec(self):
         """Setup ipsec config."""
+
         def load_ipsec_config(data, rightsourceip=False):
             for i in data.keys():
                 config.append(i)
                 for j in data[i].keys():
                     config.append("\t %s=%s" % (j, data[i][j]))
                 if rightsourceip:
-                    config.append("\t rightsourceip=%s.16/26" % self.l2tp.address.rsplit(".", 1)[0])
+                    config.append(
+                        "\t rightsourceip=%s.16/26"
+                        % self.l2tp.address.rsplit(".", 1)[0]
+                    )
                 config.append("")
 
         config = []
@@ -532,8 +540,9 @@
         ipsec_secret = []
         ipsec_secret.append(r": PSK \"%s\"" % self.l2tp.psk_secret)
         ipsec_secret.append(r": RSA \"%s\"" % "serverKey.der")
-        ipsec_secret.append(r"%s : XAUTH \"%s\"" % (self.l2tp.username,
-                                                    self.l2tp.password))
+        ipsec_secret.append(
+            r"%s : XAUTH \"%s\"" % (self.l2tp.username, self.l2tp.password)
+        )
         self.create_config_file("\n".join(ipsec_secret), "/etc/ipsec.secrets")
 
     def setup_xl2tpd(self, ip_range=20):
@@ -542,9 +551,10 @@
         xl2tpd_conf = list(network_const.XL2TPD_CONF_GLOBAL)
         xl2tpd_conf.append("auth file = %s" % PPP_CHAP_SECRET_PATH)
         xl2tpd_conf.extend(network_const.XL2TPD_CONF_INS)
-        xl2tpd_conf.append("ip range = %s.%s-%s.%s" %
-                           (net_id, host_id, net_id,
-                            str(int(host_id)+ip_range)))
+        xl2tpd_conf.append(
+            "ip range = %s.%s-%s.%s"
+            % (net_id, host_id, net_id, str(int(host_id) + ip_range))
+        )
         xl2tpd_conf.append("local ip = %s" % self.l2tp.address)
         xl2tpd_conf.append("name = %s" % self.l2tp.name)
         xl2tpd_conf.append("pppoptfile = %s" % XL2TPD_OPTION_CONFIG_PATH)
@@ -552,16 +562,14 @@
         self.create_config_file("\n".join(xl2tpd_conf), XL2TPD_CONFIG_PATH)
         xl2tpd_option = list(network_const.XL2TPD_OPTION)
         xl2tpd_option.append("name %s" % self.l2tp.name)
-        self.create_config_file("\n".join(xl2tpd_option),
-                                XL2TPD_OPTION_CONFIG_PATH)
+        self.create_config_file("\n".join(xl2tpd_option), XL2TPD_OPTION_CONFIG_PATH)
 
     def setup_ppp_secret(self):
         self.replace_config_option(
             r"\S+ %s \S+ \*" % self.l2tp.name,
-            "%s %s %s *" % (self.l2tp.username,
-                            self.l2tp.name,
-                            self.l2tp.password),
-            PPP_CHAP_SECRET_PATH)
+            "%s %s %s *" % (self.l2tp.username, self.l2tp.name, self.l2tp.password),
+            PPP_CHAP_SECRET_PATH,
+        )
 
     def generate_vpn_cert_keys(self, country, org):
         """Generate cert and keys for vpn server."""
@@ -569,25 +577,32 @@
         lifetime = "--lifetime 365"
         size = "--size 4096"
 
-        self.ssh.run("ipsec pki --gen %s %s --outform der > caKey.der" %
-                     (rsa, size))
-        self.ssh.run("ipsec pki --self --ca %s --in caKey.der %s --dn "
-                     "\"C=%s, O=%s, CN=%s\" --outform der > caCert.der" %
-                     (lifetime, rsa, country, org, self.l2tp.hostname))
-        self.ssh.run("ipsec pki --gen %s %s --outform der > serverKey.der" %
-                     (size, rsa))
-        self.ssh.run("ipsec pki --pub --in serverKey.der %s | ipsec pki "
-                     "--issue %s --cacert caCert.der --cakey caKey.der "
-                     "--dn \"C=%s, O=%s, CN=%s\" --san %s --flag serverAuth"
-                     " --flag ikeIntermediate --outform der > serverCert.der" %
-                     (rsa, lifetime, country, org, self.l2tp.hostname, LOCALHOST))
-        self.ssh.run("ipsec pki --gen %s %s --outform der > clientKey.der" %
-                     (size, rsa))
-        self.ssh.run("ipsec pki --pub --in clientKey.der %s | ipsec pki "
-                     "--issue %s --cacert caCert.der --cakey caKey.der "
-                     "--dn \"C=%s, O=%s, CN=%s@%s\" --outform der > "
-                     "clientCert.der" % (rsa, lifetime, country, org,
-                                         self.l2tp.username, self.l2tp.hostname))
+        self.ssh.run("ipsec pki --gen %s %s --outform der > caKey.der" % (rsa, size))
+        self.ssh.run(
+            "ipsec pki --self --ca %s --in caKey.der %s --dn "
+            '"C=%s, O=%s, CN=%s" --outform der > caCert.der'
+            % (lifetime, rsa, country, org, self.l2tp.hostname)
+        )
+        self.ssh.run(
+            "ipsec pki --gen %s %s --outform der > serverKey.der" % (size, rsa)
+        )
+        self.ssh.run(
+            "ipsec pki --pub --in serverKey.der %s | ipsec pki "
+            "--issue %s --cacert caCert.der --cakey caKey.der "
+            '--dn "C=%s, O=%s, CN=%s" --san %s --flag serverAuth'
+            " --flag ikeIntermediate --outform der > serverCert.der"
+            % (rsa, lifetime, country, org, self.l2tp.hostname, LOCALHOST)
+        )
+        self.ssh.run(
+            "ipsec pki --gen %s %s --outform der > clientKey.der" % (size, rsa)
+        )
+        self.ssh.run(
+            "ipsec pki --pub --in clientKey.der %s | ipsec pki "
+            "--issue %s --cacert caCert.der --cakey caKey.der "
+            '--dn "C=%s, O=%s, CN=%s@%s" --outform der > '
+            "clientCert.der"
+            % (rsa, lifetime, country, org, self.l2tp.username, self.l2tp.hostname)
+        )
 
         self.ssh.run(
             "openssl rsa -inform DER -in clientKey.der"
@@ -598,8 +613,7 @@
             " -out clientCert.pem -outform PEM"
         )
         self.ssh.run(
-            "openssl x509 -inform DER -in caCert.der"
-            " -out caCert.pem -outform PEM"
+            "openssl x509 -inform DER -in caCert.der" " -out caCert.pem -outform PEM"
         )
         self.ssh.run(
             "openssl pkcs12 -in clientCert.pem -inkey  clientKey.pem"
@@ -625,29 +639,45 @@
         ikev2_vpn_cert_keys = [
             "ipsec pki --gen %s %s --outform der > caKey.der" % (rsa, size),
             "ipsec pki --self --ca %s --in caKey.der %s --dn "
-            "\"C=%s, O=%s, CN=%s\" --outform der > caCert.der" %
-            (lifetime, rsa, country, org, self.l2tp.hostname),
+            '"C=%s, O=%s, CN=%s" --outform der > caCert.der'
+            % (lifetime, rsa, country, org, self.l2tp.hostname),
             "ipsec pki --gen %s %s --outform der > serverKey.der" % (size, rsa),
             "ipsec pki --pub --in serverKey.der %s | ipsec pki --issue %s "
             r"--cacert caCert.der --cakey caKey.der --dn \"C=%s, O=%s, CN=%s\" "
             "--san %s --san %s --flag serverAuth --flag ikeIntermediate "
-            "--outform der > serverCert.der" % (rsa, lifetime, country, org,
-                                                self.l2tp.hostname, LOCALHOST,
-                                                self.l2tp.hostname),
+            "--outform der > serverCert.der"
+            % (
+                rsa,
+                lifetime,
+                country,
+                org,
+                self.l2tp.hostname,
+                LOCALHOST,
+                self.l2tp.hostname,
+            ),
             "ipsec pki --gen %s %s --outform der > clientKey.der" % (size, rsa),
             "ipsec pki --pub --in clientKey.der %s | ipsec pki --issue %s "
             r"--cacert caCert.der --cakey caKey.der --dn \"C=%s, O=%s, CN=%s@%s\" "
             r"--san \"%s\" --san \"%s@%s\" --san \"%s@%s\" --outform der "
-            "> clientCert.der" % (rsa, lifetime, country, org, self.l2tp.username,
-                                  self.l2tp.hostname, self.l2tp.username,
-                                  self.l2tp.username, LOCALHOST,
-                                  self.l2tp.username, self.l2tp.hostname),
+            "> clientCert.der"
+            % (
+                rsa,
+                lifetime,
+                country,
+                org,
+                self.l2tp.username,
+                self.l2tp.hostname,
+                self.l2tp.username,
+                self.l2tp.username,
+                LOCALHOST,
+                self.l2tp.username,
+                self.l2tp.hostname,
+            ),
             "openssl rsa -inform DER -in clientKey.der "
             "-out clientKey.pem -outform PEM",
             "openssl x509 -inform DER -in clientCert.der "
             "-out clientCert.pem -outform PEM",
-            "openssl x509 -inform DER -in caCert.der "
-            "-out caCert.pem -outform PEM",
+            "openssl x509 -inform DER -in caCert.der " "-out caCert.pem -outform PEM",
             "openssl pkcs12 -in clientCert.pem -inkey  clientKey.pem "
             "-certfile caCert.pem -export -out clientPkcs.p12 -passout pass:",
             "mv caCert.pem /etc/ipsec.d/cacerts/",
@@ -697,12 +727,14 @@
         """Restore firewall for vpn pptp server."""
         self.update_firewall_rules_list()
         if "pptpd" in self.firewall_rules_list:
-            self.ssh.run("uci del firewall.@rule[%s]"
-                         % self.firewall_rules_list.index("pptpd"))
+            self.ssh.run(
+                "uci del firewall.@rule[%s]" % self.firewall_rules_list.index("pptpd")
+            )
         self.update_firewall_rules_list()
         if "GRP" in self.firewall_rules_list:
-            self.ssh.run("uci del firewall.@rule[%s]"
-                         % self.firewall_rules_list.index("GRP"))
+            self.ssh.run(
+                "uci del firewall.@rule[%s]" % self.firewall_rules_list.index("GRP")
+            )
         self.remove_custom_firewall_rules()
         self.service_manager.need_restart(SERVICE_FIREWALL)
 
@@ -733,10 +765,11 @@
 
         net_id = self.l2tp.address.rsplit(".", 1)[0]
         iptable_rules = list(network_const.FIREWALL_RULES_FOR_L2TP)
-        iptable_rules.append("iptables -A FORWARD -s %s.0/24"
-                             "  -j ACCEPT" % net_id)
-        iptable_rules.append("iptables -t nat -A POSTROUTING"
-                             " -s %s.0/24 -o eth0.2 -j MASQUERADE" % net_id)
+        iptable_rules.append("iptables -A FORWARD -s %s.0/24" "  -j ACCEPT" % net_id)
+        iptable_rules.append(
+            "iptables -t nat -A POSTROUTING"
+            " -s %s.0/24 -o eth0.2 -j MASQUERADE" % net_id
+        )
 
         self.add_custom_firewall_rules(iptable_rules)
         self.service_manager.need_restart(SERVICE_FIREWALL)
@@ -745,16 +778,22 @@
         """Restore firewall for vpn l2tp server."""
         self.update_firewall_rules_list()
         if "ipsec esp" in self.firewall_rules_list:
-            self.ssh.run("uci del firewall.@rule[%s]"
-                         % self.firewall_rules_list.index("ipsec esp"))
+            self.ssh.run(
+                "uci del firewall.@rule[%s]"
+                % self.firewall_rules_list.index("ipsec esp")
+            )
         self.update_firewall_rules_list()
         if "ipsec nat-t" in self.firewall_rules_list:
-            self.ssh.run("uci del firewall.@rule[%s]"
-                         % self.firewall_rules_list.index("ipsec nat-t"))
+            self.ssh.run(
+                "uci del firewall.@rule[%s]"
+                % self.firewall_rules_list.index("ipsec nat-t")
+            )
         self.update_firewall_rules_list()
         if "auth header" in self.firewall_rules_list:
-            self.ssh.run("uci del firewall.@rule[%s]"
-                         % self.firewall_rules_list.index("auth header"))
+            self.ssh.run(
+                "uci del firewall.@rule[%s]"
+                % self.firewall_rules_list.index("auth header")
+            )
         self.remove_custom_firewall_rules()
         self.service_manager.need_restart(SERVICE_FIREWALL)
 
@@ -764,22 +803,20 @@
         Args:
             rules: A list of iptable rules to apply.
         """
-        backup_file_path = FIREWALL_CUSTOM_OPTION_PATH+".backup"
+        backup_file_path = FIREWALL_CUSTOM_OPTION_PATH + ".backup"
         if not self.file_exists(backup_file_path):
-            self.ssh.run("mv %s %s" % (FIREWALL_CUSTOM_OPTION_PATH,
-                                       backup_file_path))
+            self.ssh.run("mv %s %s" % (FIREWALL_CUSTOM_OPTION_PATH, backup_file_path))
         for rule in rules:
             self.ssh.run("echo %s >> %s" % (rule, FIREWALL_CUSTOM_OPTION_PATH))
 
     def remove_custom_firewall_rules(self):
         """Clean up and recover custom firewall rules."""
-        backup_file_path = FIREWALL_CUSTOM_OPTION_PATH+".backup"
+        backup_file_path = FIREWALL_CUSTOM_OPTION_PATH + ".backup"
         if self.file_exists(backup_file_path):
-            self.ssh.run("mv %s %s" % (backup_file_path,
-                                       FIREWALL_CUSTOM_OPTION_PATH))
+            self.ssh.run("mv %s %s" % (backup_file_path, FIREWALL_CUSTOM_OPTION_PATH))
         else:
             self.log.debug("Did not find %s" % backup_file_path)
-            self.ssh.run("echo "" > %s" % FIREWALL_CUSTOM_OPTION_PATH)
+            self.ssh.run('echo "" > %s' % FIREWALL_CUSTOM_OPTION_PATH)
 
     def disable_pptp_service(self):
         """Disable pptp service."""
@@ -791,7 +828,7 @@
         self.ssh.run("uci set network.lan2.type=bridge")
         self.ssh.run("uci set network.lan2.ifname=eth1.2")
         self.ssh.run("uci set network.lan2.proto=static")
-        self.ssh.run("uci set network.lan2.ipaddr=\"%s\"" % self.l2tp.address)
+        self.ssh.run('uci set network.lan2.ipaddr="%s"' % self.l2tp.address)
         self.ssh.run("uci set network.lan2.netmask=255.255.255.0")
         self.ssh.run("uci set network.lan2=interface")
         self.service_manager.reload(SERVICE_NETWORK)
@@ -855,10 +892,10 @@
             self.commit_changes()
 
     def _add_dhcp_option(self, args):
-        self.ssh.run("uci add_list dhcp.lan.dhcp_option=\"%s\"" % args)
+        self.ssh.run('uci add_list dhcp.lan.dhcp_option="%s"' % args)
 
     def _remove_dhcp_option(self, args):
-        self.ssh.run("uci del_list dhcp.lan.dhcp_option=\"%s\"" % args)
+        self.ssh.run('uci del_list dhcp.lan.dhcp_option="%s"' % args)
 
     def add_default_dns(self, addr_list):
         """Add default dns server for client.
@@ -888,7 +925,7 @@
         Args:
             addr_list: dns ip address for Openwrt client.
         """
-        self.ssh.run("uci add_list dhcp.lan.dns=\"%s\"" % addr_list)
+        self.ssh.run('uci add_list dhcp.lan.dns="%s"' % addr_list)
         self.config.add("default_v6_dns %s" % addr_list)
         self.service_manager.need_restart(SERVICE_ODHCPD)
         self.commit_changes()
@@ -899,7 +936,7 @@
         Args:
             addr_list: dns ip address for Openwrt client.
         """
-        self.ssh.run("uci del_list dhcp.lan.dns=\"%s\"" % addr_list)
+        self.ssh.run('uci del_list dhcp.lan.dns="%s"' % addr_list)
         self.config.add("default_v6_dns %s" % addr_list)
         self.service_manager.need_restart(SERVICE_ODHCPD)
         self.commit_changes()
@@ -917,19 +954,19 @@
         self.commit_changes()
 
     def add_dhcp_rapid_commit(self):
-        self.create_config_file("dhcp-rapid-commit\n","/etc/dnsmasq.conf")
+        self.create_config_file("dhcp-rapid-commit\n", "/etc/dnsmasq.conf")
         self.config.add("add_dhcp_rapid_commit")
         self.service_manager.need_restart(SERVICE_DNSMASQ)
         self.commit_changes()
 
     def remove_dhcp_rapid_commit(self):
-        self.create_config_file("","/etc/dnsmasq.conf")
+        self.create_config_file("", "/etc/dnsmasq.conf")
         self.config.discard("add_dhcp_rapid_commit")
         self.service_manager.need_restart(SERVICE_DNSMASQ)
         self.commit_changes()
 
     def start_tcpdump(self, test_name, args="", interface="br-lan"):
-        """"Start tcpdump on OpenWrt.
+        """Start tcpdump on OpenWrt.
 
         Args:
             test_name: Test name for create tcpdump file name.
@@ -942,8 +979,10 @@
         self.package_install("tcpdump")
         if not self.path_exists(TCPDUMP_DIR):
             self.ssh.run("mkdir %s" % TCPDUMP_DIR)
-        tcpdump_file_name = "openwrt_%s_%s.pcap" % (test_name,
-                                                    time.strftime("%Y-%m-%d_%H-%M-%S", time.localtime(time.time())))
+        tcpdump_file_name = "openwrt_%s_%s.pcap" % (
+            test_name,
+            time.strftime("%Y-%m-%d_%H-%M-%S", time.localtime(time.time())),
+        )
         tcpdump_file_path = "".join([TCPDUMP_DIR, tcpdump_file_name])
         cmd = "tcpdump -i %s -s0 %s -w %s" % (interface, args, tcpdump_file_path)
         self.ssh.run_async(cmd)
@@ -988,7 +1027,9 @@
 
     def _get_tcpdump_pid(self, tcpdump_file_name):
         """Check tcpdump process on OpenWrt."""
-        return self.ssh.run("pgrep -f %s" % (tcpdump_file_name), ignore_status=True).stdout
+        return self.ssh.run(
+            "pgrep -f %s" % (tcpdump_file_name), ignore_status=True
+        ).stdout
 
     def setup_mdns(self):
         self.config.add("setup_mdns")
@@ -1013,7 +1054,7 @@
         self.service_manager.need_restart(SERVICE_FIREWALL)
         self.commit_changes()
 
-    def setup_captive_portal(self, fas_fdqn,fas_port=2080):
+    def setup_captive_portal(self, fas_fdqn, fas_port=2080):
         """Create captive portal with Forwarding Authentication Service.
 
         Args:
@@ -1026,7 +1067,7 @@
         self.ssh.run("uci set opennds.@opennds[0].gatewayport=2050")
         self.ssh.run("uci set opennds.@opennds[0].fasport=%s" % fas_port)
         self.ssh.run("uci set opennds.@opennds[0].fasremotefqdn=%s" % fas_fdqn)
-        self.ssh.run("uci set opennds.@opennds[0].faspath=\"/nds/fas-aes.php\"")
+        self.ssh.run('uci set opennds.@opennds[0].faspath="/nds/fas-aes.php"')
         self.ssh.run("uci set opennds.@opennds[0].faskey=1234567890")
         self.service_manager.need_restart(SERVICE_OPENNDS)
         # Config uhttpd
@@ -1055,8 +1096,8 @@
         self.clear_resource_record()
         # Restore uhttpd
         self.ssh.run("uci del uhttpd.main.interpreter")
-        self.ssh.run("uci del_list uhttpd.main.listen_http=\'0.0.0.0:%s\'" % fas_port)
-        self.ssh.run("uci del_list uhttpd.main.listen_http=\'[::]:%s\'" % fas_port)
+        self.ssh.run("uci del_list uhttpd.main.listen_http='0.0.0.0:%s'" % fas_port)
+        self.ssh.run("uci del_list uhttpd.main.listen_http='[::]:%s'" % fas_port)
         self.service_manager.need_restart(SERVICE_UHTTPD)
         # Clean web root
         self.ssh.run("rm -r /www/nds")
@@ -1067,9 +1108,9 @@
 class ServiceManager(object):
     """Class for service on OpenWrt.
 
-        Attributes:
-        ssh: ssh object for the AP.
-        _need_restart: Record service need to restart.
+    Attributes:
+    ssh: ssh object for the AP.
+    _need_restart: Record service need to restart.
     """
 
     def __init__(self, ssh):
diff --git a/src/antlion/controllers/openwrt_lib/openwrt_constants.py b/src/antlion/controllers/openwrt_lib/openwrt_constants.py
index 8fd7686..3b591d5 100644
--- a/src/antlion/controllers/openwrt_lib/openwrt_constants.py
+++ b/src/antlion/controllers/openwrt_lib/openwrt_constants.py
@@ -18,21 +18,21 @@
 
 
 class OpenWrtWifiSecurity:
-  # Used by OpenWrt AP
-  WPA_PSK_DEFAULT = "psk"
-  WPA_PSK_CCMP = "psk+ccmp"
-  WPA_PSK_TKIP = "psk+tkip"
-  WPA_PSK_TKIP_AND_CCMP = "psk+tkip+ccmp"
-  WPA2_PSK_DEFAULT = "psk2"
-  WPA2_PSK_CCMP = "psk2+ccmp"
-  WPA2_PSK_TKIP = "psk2+tkip"
-  WPA2_PSK_TKIP_AND_CCMP = "psk2+tkip+ccmp"
+    # Used by OpenWrt AP
+    WPA_PSK_DEFAULT = "psk"
+    WPA_PSK_CCMP = "psk+ccmp"
+    WPA_PSK_TKIP = "psk+tkip"
+    WPA_PSK_TKIP_AND_CCMP = "psk+tkip+ccmp"
+    WPA2_PSK_DEFAULT = "psk2"
+    WPA2_PSK_CCMP = "psk2+ccmp"
+    WPA2_PSK_TKIP = "psk2+tkip"
+    WPA2_PSK_TKIP_AND_CCMP = "psk2+tkip+ccmp"
 
 
 class OpenWrtWifiSetting:
-  IFACE_2G = 2
-  IFACE_5G = 3
+    IFACE_2G = 2
+    IFACE_5G = 3
 
 
 class OpenWrtModelMap:
-  NETGEAR_R8000 = ("radio2", "radio1")
+    NETGEAR_R8000 = ("radio2", "radio1")
diff --git a/src/antlion/controllers/openwrt_lib/wireless_config.py b/src/antlion/controllers/openwrt_lib/wireless_config.py
index 7810fa2..9cdb309 100644
--- a/src/antlion/controllers/openwrt_lib/wireless_config.py
+++ b/src/antlion/controllers/openwrt_lib/wireless_config.py
@@ -4,50 +4,50 @@
 
 
 class WirelessConfig(object):
-  """Creates an object to hold wireless config.
+    """Creates an object to hold wireless config.
 
-  Attributes:
-    name: name of the wireless config
-    ssid: SSID of the network.
-    security: security of the wifi network.
-    band: band of the wifi network.
-    iface: network interface of the wifi network.
-    password: password for psk network.
-    wep_key: wep keys for wep network.
-    wep_key_num: key number for wep network.
-    radius_server_ip: IP address of radius server.
-    radius_server_port: Port number of radius server.
-    radius_server_secret: Secret key of radius server.
-    hidden: Boolean, if the wifi network is hidden.
-    ieee80211w: PMF bit of the wifi network.
-  """
+    Attributes:
+      name: name of the wireless config
+      ssid: SSID of the network.
+      security: security of the wifi network.
+      band: band of the wifi network.
+      iface: network interface of the wifi network.
+      password: password for psk network.
+      wep_key: wep keys for wep network.
+      wep_key_num: key number for wep network.
+      radius_server_ip: IP address of radius server.
+      radius_server_port: Port number of radius server.
+      radius_server_secret: Secret key of radius server.
+      hidden: Boolean, if the wifi network is hidden.
+      ieee80211w: PMF bit of the wifi network.
+    """
 
-  def __init__(
-      self,
-      name,
-      ssid,
-      security,
-      band,
-      iface=NET_IFACE,
-      password=None,
-      wep_key=None,
-      wep_key_num=1,
-      radius_server_ip=None,
-      radius_server_port=None,
-      radius_server_secret=None,
-      hidden=False,
-      ieee80211w=None):
-    self.name = name
-    self.ssid = ssid
-    self.security = security
-    self.band = band
-    self.iface = iface
-    self.password = password
-    self.wep_key = wep_key
-    self.wep_key_num = wep_key_num
-    self.radius_server_ip = radius_server_ip
-    self.radius_server_port = radius_server_port
-    self.radius_server_secret = radius_server_secret
-    self.hidden = hidden
-    self.ieee80211w = ieee80211w
-
+    def __init__(
+        self,
+        name,
+        ssid,
+        security,
+        band,
+        iface=NET_IFACE,
+        password=None,
+        wep_key=None,
+        wep_key_num=1,
+        radius_server_ip=None,
+        radius_server_port=None,
+        radius_server_secret=None,
+        hidden=False,
+        ieee80211w=None,
+    ):
+        self.name = name
+        self.ssid = ssid
+        self.security = security
+        self.band = band
+        self.iface = iface
+        self.password = password
+        self.wep_key = wep_key
+        self.wep_key_num = wep_key_num
+        self.radius_server_ip = radius_server_ip
+        self.radius_server_port = radius_server_port
+        self.radius_server_secret = radius_server_secret
+        self.hidden = hidden
+        self.ieee80211w = ieee80211w
diff --git a/src/antlion/controllers/openwrt_lib/wireless_settings_applier.py b/src/antlion/controllers/openwrt_lib/wireless_settings_applier.py
index ad36482..d899a30 100644
--- a/src/antlion/controllers/openwrt_lib/wireless_settings_applier.py
+++ b/src/antlion/controllers/openwrt_lib/wireless_settings_applier.py
@@ -6,7 +6,6 @@
 from antlion.controllers.openwrt_lib.network_settings import SERVICE_DNSMASQ
 from antlion.controllers.openwrt_lib.network_settings import ServiceManager
 
-
 LEASE_FILE = "/tmp/dhcp.leases"
 OPEN_SECURITY = "none"
 PSK1_SECURITY = "psk"
@@ -24,131 +23,174 @@
 
 
 class WirelessSettingsApplier(object):
-  """Class for wireless settings.
+    """Class for wireless settings.
 
-  Attributes:
-    ssh: ssh object for the AP.
-    service_manager: Object manage service configuration
-    wireless_configs: a list of
-      antlion.controllers.openwrt_lib.wireless_config.WirelessConfig.
-    channel_2g: channel for 2G band.
-    channel_5g: channel for 5G band.
-  """
-
-  def __init__(self, ssh, configs, channel_2g, channel_5g, radio_2g=RADIO_2G, radio_5g=RADIO_5G):
-    """Initialize wireless settings.
-
-    Args:
-      ssh: ssh connection object.
-      configs: a list of
+    Attributes:
+      ssh: ssh object for the AP.
+      service_manager: Object manage service configuration
+      wireless_configs: a list of
         antlion.controllers.openwrt_lib.wireless_config.WirelessConfig.
       channel_2g: channel for 2G band.
       channel_5g: channel for 5G band.
     """
-    self.ssh = ssh
-    self.service_manager = ServiceManager(ssh)
-    self.wireless_configs = configs
-    self.channel_2g = channel_2g
-    self.channel_5g = channel_5g
-    self.radio_2g = radio_2g
-    self.radio_5g = radio_5g
 
-  def apply_wireless_settings(self):
-    """Configure wireless settings from a list of configs."""
-    default_2g_iface = "default_" + self.radio_2g
-    default_5g_iface = "default_" + self.radio_5g
+    def __init__(
+        self, ssh, configs, channel_2g, channel_5g, radio_2g=RADIO_2G, radio_5g=RADIO_5G
+    ):
+        """Initialize wireless settings.
 
-    # set channels for 2G and 5G bands
-    self.ssh.run("uci set wireless.%s.channel='%s'" % (self.radio_2g, self.channel_2g))
-    self.ssh.run("uci set wireless.%s.channel='%s'" % (self.radio_5g, self.channel_5g))
-    if self.channel_5g == 165:
-      self.ssh.run("uci set wireless.%s.htmode='VHT20'" % self.radio_5g)
-    elif self.channel_5g == 132 or self.channel_5g == 136:
-      self.ssh.run("iw reg set ZA")
-      self.ssh.run("uci set wireless.%s.htmode='VHT40'" % self.radio_5g)
+        Args:
+          ssh: ssh connection object.
+          configs: a list of
+            antlion.controllers.openwrt_lib.wireless_config.WirelessConfig.
+          channel_2g: channel for 2G band.
+          channel_5g: channel for 5G band.
+        """
+        self.ssh = ssh
+        self.service_manager = ServiceManager(ssh)
+        self.wireless_configs = configs
+        self.channel_2g = channel_2g
+        self.channel_5g = channel_5g
+        self.radio_2g = radio_2g
+        self.radio_5g = radio_5g
 
-    if self.channel_2g == 13:
-      self.ssh.run("iw reg set AU")
+    def apply_wireless_settings(self):
+        """Configure wireless settings from a list of configs."""
+        default_2g_iface = "default_" + self.radio_2g
+        default_5g_iface = "default_" + self.radio_5g
 
-    # disable default OpenWrt SSID
-    self.ssh.run("uci set wireless.%s.disabled='%s'" %
-                 (default_2g_iface, DISABLE_RADIO))
-    self.ssh.run("uci set wireless.%s.disabled='%s'" %
-                 (default_5g_iface, DISABLE_RADIO))
+        # set channels for 2G and 5G bands
+        self.ssh.run(
+            "uci set wireless.%s.channel='%s'" % (self.radio_2g, self.channel_2g)
+        )
+        self.ssh.run(
+            "uci set wireless.%s.channel='%s'" % (self.radio_5g, self.channel_5g)
+        )
+        if self.channel_5g == 165:
+            self.ssh.run("uci set wireless.%s.htmode='VHT20'" % self.radio_5g)
+        elif self.channel_5g == 132 or self.channel_5g == 136:
+            self.ssh.run("iw reg set ZA")
+            self.ssh.run("uci set wireless.%s.htmode='VHT40'" % self.radio_5g)
 
-    # Enable radios
-    self.ssh.run("uci set wireless.%s.disabled='%s'" % (self.radio_2g, ENABLE_RADIO))
-    self.ssh.run("uci set wireless.%s.disabled='%s'" % (self.radio_5g, ENABLE_RADIO))
+        if self.channel_2g == 13:
+            self.ssh.run("iw reg set AU")
 
-    for config in self.wireless_configs:
+        # disable default OpenWrt SSID
+        self.ssh.run(
+            "uci set wireless.%s.disabled='%s'" % (default_2g_iface, DISABLE_RADIO)
+        )
+        self.ssh.run(
+            "uci set wireless.%s.disabled='%s'" % (default_5g_iface, DISABLE_RADIO)
+        )
 
-      # configure open network
-      if config.security == OPEN_SECURITY:
-        if config.band == hostapd_constants.BAND_2G:
-          self.ssh.run("uci set wireless.%s.ssid='%s'" %
-                       (default_2g_iface, config.ssid))
-          self.ssh.run("uci set wireless.%s.disabled='%s'" %
-                       (default_2g_iface, ENABLE_RADIO))
-          if config.hidden:
-            self.ssh.run("uci set wireless.%s.hidden='%s'" %
-                         (default_2g_iface, ENABLE_HIDDEN))
-        elif config.band == hostapd_constants.BAND_5G:
-          self.ssh.run("uci set wireless.%s.ssid='%s'" %
-                       (default_5g_iface, config.ssid))
-          self.ssh.run("uci set wireless.%s.disabled='%s'" %
-                       (default_5g_iface, ENABLE_RADIO))
-          if config.hidden:
-            self.ssh.run("uci set wireless.%s.hidden='%s'" %
-                         (default_5g_iface, ENABLE_HIDDEN))
-        continue
+        # Enable radios
+        self.ssh.run(
+            "uci set wireless.%s.disabled='%s'" % (self.radio_2g, ENABLE_RADIO)
+        )
+        self.ssh.run(
+            "uci set wireless.%s.disabled='%s'" % (self.radio_5g, ENABLE_RADIO)
+        )
 
-      self.ssh.run("uci set wireless.%s='wifi-iface'" % config.name)
-      if config.band == hostapd_constants.BAND_2G:
-        self.ssh.run("uci set wireless.%s.device='%s'" % (config.name, self.radio_2g))
-      else:
-        self.ssh.run("uci set wireless.%s.device='%s'" % (config.name, self.radio_5g))
-      self.ssh.run("uci set wireless.%s.network='%s'" %
-                   (config.name, config.iface))
-      self.ssh.run("uci set wireless.%s.mode='ap'" % config.name)
-      self.ssh.run("uci set wireless.%s.ssid='%s'" %
-                   (config.name, config.ssid))
-      self.ssh.run("uci set wireless.%s.encryption='%s'" %
-                   (config.name, config.security))
-      if config.security == PSK_SECURITY or config.security == SAE_SECURITY\
-              or config.security == PSK1_SECURITY\
-              or config.security == SAEMIXED_SECURITY:
-        self.ssh.run("uci set wireless.%s.key='%s'" %
-                     (config.name, config.password))
-      elif config.security == WEP_SECURITY:
-        self.ssh.run("uci set wireless.%s.key%s='%s'" %
-                     (config.name, config.wep_key_num, config.wep_key))
-        self.ssh.run("uci set wireless.%s.key='%s'" %
-                     (config.name, config.wep_key_num))
-      elif config.security == ENT_SECURITY:
-        self.ssh.run("uci set wireless.%s.auth_secret='%s'" %
-                     (config.name, config.radius_server_secret))
-        self.ssh.run("uci set wireless.%s.auth_server='%s'" %
-                     (config.name, config.radius_server_ip))
-        self.ssh.run("uci set wireless.%s.auth_port='%s'" %
-                     (config.name, config.radius_server_port))
-      if config.ieee80211w:
-        self.ssh.run("uci set wireless.%s.ieee80211w='%s'" %
-                     (config.name, config.ieee80211w))
-      if config.hidden:
-        self.ssh.run("uci set wireless.%s.hidden='%s'" %
-                     (config.name, ENABLE_HIDDEN))
+        for config in self.wireless_configs:
+            # configure open network
+            if config.security == OPEN_SECURITY:
+                if config.band == hostapd_constants.BAND_2G:
+                    self.ssh.run(
+                        "uci set wireless.%s.ssid='%s'"
+                        % (default_2g_iface, config.ssid)
+                    )
+                    self.ssh.run(
+                        "uci set wireless.%s.disabled='%s'"
+                        % (default_2g_iface, ENABLE_RADIO)
+                    )
+                    if config.hidden:
+                        self.ssh.run(
+                            "uci set wireless.%s.hidden='%s'"
+                            % (default_2g_iface, ENABLE_HIDDEN)
+                        )
+                elif config.band == hostapd_constants.BAND_5G:
+                    self.ssh.run(
+                        "uci set wireless.%s.ssid='%s'"
+                        % (default_5g_iface, config.ssid)
+                    )
+                    self.ssh.run(
+                        "uci set wireless.%s.disabled='%s'"
+                        % (default_5g_iface, ENABLE_RADIO)
+                    )
+                    if config.hidden:
+                        self.ssh.run(
+                            "uci set wireless.%s.hidden='%s'"
+                            % (default_5g_iface, ENABLE_HIDDEN)
+                        )
+                continue
 
-    self.ssh.run("uci commit wireless")
-    self.ssh.run("cp %s %s.tmp" % (LEASE_FILE, LEASE_FILE))
+            self.ssh.run("uci set wireless.%s='wifi-iface'" % config.name)
+            if config.band == hostapd_constants.BAND_2G:
+                self.ssh.run(
+                    "uci set wireless.%s.device='%s'" % (config.name, self.radio_2g)
+                )
+            else:
+                self.ssh.run(
+                    "uci set wireless.%s.device='%s'" % (config.name, self.radio_5g)
+                )
+            self.ssh.run(
+                "uci set wireless.%s.network='%s'" % (config.name, config.iface)
+            )
+            self.ssh.run("uci set wireless.%s.mode='ap'" % config.name)
+            self.ssh.run("uci set wireless.%s.ssid='%s'" % (config.name, config.ssid))
+            self.ssh.run(
+                "uci set wireless.%s.encryption='%s'" % (config.name, config.security)
+            )
+            if (
+                config.security == PSK_SECURITY
+                or config.security == SAE_SECURITY
+                or config.security == PSK1_SECURITY
+                or config.security == SAEMIXED_SECURITY
+            ):
+                self.ssh.run(
+                    "uci set wireless.%s.key='%s'" % (config.name, config.password)
+                )
+            elif config.security == WEP_SECURITY:
+                self.ssh.run(
+                    "uci set wireless.%s.key%s='%s'"
+                    % (config.name, config.wep_key_num, config.wep_key)
+                )
+                self.ssh.run(
+                    "uci set wireless.%s.key='%s'" % (config.name, config.wep_key_num)
+                )
+            elif config.security == ENT_SECURITY:
+                self.ssh.run(
+                    "uci set wireless.%s.auth_secret='%s'"
+                    % (config.name, config.radius_server_secret)
+                )
+                self.ssh.run(
+                    "uci set wireless.%s.auth_server='%s'"
+                    % (config.name, config.radius_server_ip)
+                )
+                self.ssh.run(
+                    "uci set wireless.%s.auth_port='%s'"
+                    % (config.name, config.radius_server_port)
+                )
+            if config.ieee80211w:
+                self.ssh.run(
+                    "uci set wireless.%s.ieee80211w='%s'"
+                    % (config.name, config.ieee80211w)
+                )
+            if config.hidden:
+                self.ssh.run(
+                    "uci set wireless.%s.hidden='%s'" % (config.name, ENABLE_HIDDEN)
+                )
 
-  def cleanup_wireless_settings(self):
-    """Reset wireless settings to default."""
-    self.ssh.run("wifi down")
-    self.ssh.run("rm -f /etc/config/wireless")
-    self.ssh.run("wifi config")
-    if self.channel_5g == 132:
-      self.ssh.run("iw reg set US")
-    self.ssh.run("cp %s.tmp %s" % (LEASE_FILE, LEASE_FILE))
-    self.service_manager.restart(SERVICE_DNSMASQ)
-    time.sleep(9)
+        self.ssh.run("uci commit wireless")
+        self.ssh.run("cp %s %s.tmp" % (LEASE_FILE, LEASE_FILE))
 
+    def cleanup_wireless_settings(self):
+        """Reset wireless settings to default."""
+        self.ssh.run("wifi down")
+        self.ssh.run("rm -f /etc/config/wireless")
+        self.ssh.run("wifi config")
+        if self.channel_5g == 132:
+            self.ssh.run("iw reg set US")
+        self.ssh.run("cp %s.tmp %s" % (LEASE_FILE, LEASE_FILE))
+        self.service_manager.restart(SERVICE_DNSMASQ)
+        time.sleep(9)
diff --git a/src/antlion/controllers/packet_capture.py b/src/antlion/controllers/packet_capture.py
index 706f9c4..ce3d8fd 100755
--- a/src/antlion/controllers/packet_capture.py
+++ b/src/antlion/controllers/packet_capture.py
@@ -14,39 +14,36 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import os
+import threading
+import time
+
 from antlion import logger
-from antlion.controllers.ap_lib.hostapd_constants import AP_DEFAULT_CHANNEL_2G
-from antlion.controllers.ap_lib.hostapd_constants import AP_DEFAULT_CHANNEL_5G
-from antlion.controllers.ap_lib.hostapd_constants import CHANNEL_MAP
 from antlion.controllers.ap_lib.hostapd_constants import FREQUENCY_MAP
 from antlion.controllers.ap_lib.hostapd_constants import CENTER_CHANNEL_MAP
 from antlion.controllers.ap_lib.hostapd_constants import VHT_CHANNEL
 from antlion.controllers.utils_lib.ssh import connection
 from antlion.controllers.utils_lib.ssh import formatter
 from antlion.controllers.utils_lib.ssh import settings
-from antlion.libs.logging import log_stream
 from antlion.libs.proc.process import Process
-from antlion import asserts
 
-import os
-import threading
-import time
+from mobly import asserts
 
-MOBLY_CONTROLLER_CONFIG_NAME = 'PacketCapture'
-ACTS_CONTROLLER_REFERENCE_NAME = 'packet_capture'
-BSS = 'BSS'
-BSSID = 'BSSID'
-FREQ = 'freq'
-FREQUENCY = 'frequency'
-LEVEL = 'level'
-MON_2G = 'mon0'
-MON_5G = 'mon1'
-BAND_IFACE = {'2G': MON_2G, '5G': MON_5G}
-SCAN_IFACE = 'wlan2'
+MOBLY_CONTROLLER_CONFIG_NAME = "PacketCapture"
+ACTS_CONTROLLER_REFERENCE_NAME = "packet_capture"
+BSS = "BSS"
+BSSID = "BSSID"
+FREQ = "freq"
+FREQUENCY = "frequency"
+LEVEL = "level"
+MON_2G = "mon0"
+MON_5G = "mon1"
+BAND_IFACE = {"2G": MON_2G, "5G": MON_5G}
+SCAN_IFACE = "wlan2"
 SCAN_TIMEOUT = 60
-SEP = ':'
-SIGNAL = 'signal'
-SSID = 'SSID'
+SEP = ":"
+SIGNAL = "signal"
+SSID = "SSID"
 
 
 def create(configs):
@@ -100,19 +97,21 @@
         Args:
             configs: config for the packet capture.
         """
-        self.ssh_settings = settings.from_config(configs['ssh_config'])
+        self.ssh_settings = settings.from_config(configs["ssh_config"])
         self.ssh = connection.SshConnection(self.ssh_settings)
-        self.log = logger.create_logger(lambda msg: '[%s|%s] %s' % (
-            MOBLY_CONTROLLER_CONFIG_NAME, self.ssh_settings.hostname, msg))
+        self.log = logger.create_logger(
+            lambda msg: "[%s|%s] %s"
+            % (MOBLY_CONTROLLER_CONFIG_NAME, self.ssh_settings.hostname, msg)
+        )
 
-        self._create_interface(MON_2G, 'monitor')
-        self._create_interface(MON_5G, 'monitor')
+        self._create_interface(MON_2G, "monitor")
+        self._create_interface(MON_5G, "monitor")
         self.managed_mode = True
-        result = self.ssh.run('ifconfig -a', ignore_status=True)
+        result = self.ssh.run("ifconfig -a", ignore_status=True)
         if result.stderr or SCAN_IFACE not in result.stdout:
             self.managed_mode = False
         if self.managed_mode:
-            self._create_interface(SCAN_IFACE, 'managed')
+            self._create_interface(SCAN_IFACE, "managed")
 
         self.pcap_properties = dict()
         self._pcap_stop_lock = threading.Lock()
@@ -122,26 +121,24 @@
 
         Create mon0/mon1 for 2G/5G monitor mode and wlan2 for managed mode.
         """
-        if mode == 'monitor':
-            self.ssh.run('ifconfig wlan%s down' % iface[-1],
-                         ignore_status=True)
-        self.ssh.run('iw dev %s del' % iface, ignore_status=True)
-        self.ssh.run('iw phy%s interface add %s type %s' %
-                     (iface[-1], iface, mode),
-                     ignore_status=True)
-        self.ssh.run('ip link set %s up' % iface, ignore_status=True)
-        result = self.ssh.run('iw dev %s info' % iface, ignore_status=True)
+        if mode == "monitor":
+            self.ssh.run("ifconfig wlan%s down" % iface[-1], ignore_status=True)
+        self.ssh.run("iw dev %s del" % iface, ignore_status=True)
+        self.ssh.run(
+            "iw phy%s interface add %s type %s" % (iface[-1], iface, mode),
+            ignore_status=True,
+        )
+        self.ssh.run("ip link set %s up" % iface, ignore_status=True)
+        result = self.ssh.run("iw dev %s info" % iface, ignore_status=True)
         if result.stderr or iface not in result.stdout:
-            raise PacketCaptureError('Failed to configure interface %s' %
-                                     iface)
+            raise PacketCaptureError("Failed to configure interface %s" % iface)
 
     def _cleanup_interface(self, iface):
         """Clean up monitor mode interfaces."""
-        self.ssh.run('iw dev %s del' % iface, ignore_status=True)
-        result = self.ssh.run('iw dev %s info' % iface, ignore_status=True)
-        if not result.stderr or 'No such device' not in result.stderr:
-            raise PacketCaptureError('Failed to cleanup monitor mode for %s' %
-                                     iface)
+        self.ssh.run("iw dev %s del" % iface, ignore_status=True)
+        result = self.ssh.run("iw dev %s info" % iface, ignore_status=True)
+        if not result.stderr or "No such device" not in result.stderr:
+            raise PacketCaptureError("Failed to cleanup monitor mode for %s" % iface)
 
     def _parse_scan_results(self, scan_result):
         """Parses the scan dump output and returns list of dictionaries.
@@ -163,7 +160,7 @@
             if SEP not in line:
                 continue
             if BSS in line:
-                network[BSSID] = line.split('(')[0].split()[-1]
+                network[BSSID] = line.split("(")[0].split()[-1]
             field, value = line.lstrip().rstrip().split(SEP)[0:2]
             value = value.lstrip()
             if SIGNAL in line:
@@ -183,10 +180,10 @@
             List of dictionaries each representing a found network.
         """
         if not self.managed_mode:
-            raise PacketCaptureError('Managed mode not setup')
-        result = self.ssh.run('iw dev %s scan' % SCAN_IFACE)
+            raise PacketCaptureError("Managed mode not setup")
+        result = self.ssh.run("iw dev %s scan" % SCAN_IFACE)
         if result.stderr:
-            raise PacketCaptureError('Failed to get scan dump')
+            raise PacketCaptureError("Failed to get scan dump")
         if not result.stdout:
             return []
         return self._parse_scan_results(result.stdout)
@@ -224,28 +221,29 @@
 
         band = band.upper()
         if band not in BAND_IFACE:
-            self.log.error('Invalid band. Must be 2g/2G or 5g/5G')
+            self.log.error("Invalid band. Must be 2g/2G or 5g/5G")
             return False
 
         iface = BAND_IFACE[band]
         if bandwidth == 20:
-            self.ssh.run('iw dev %s set channel %s' % (iface, channel),
-                         ignore_status=True)
+            self.ssh.run(
+                "iw dev %s set channel %s" % (iface, channel), ignore_status=True
+            )
         else:
             center_freq = None
             for i, j in CENTER_CHANNEL_MAP[VHT_CHANNEL[bandwidth]]["channels"]:
                 if channel in range(i, j + 1):
                     center_freq = (FREQUENCY_MAP[i] + FREQUENCY_MAP[j]) / 2
                     break
-            asserts.assert_true(center_freq,
-                                "No match channel in VHT channel list.")
+            asserts.assert_true(center_freq, "No match channel in VHT channel list.")
             self.ssh.run(
-                'iw dev %s set freq %s %s %s' %
-                (iface, FREQUENCY_MAP[channel], bandwidth, center_freq),
-                ignore_status=True)
+                "iw dev %s set freq %s %s %s"
+                % (iface, FREQUENCY_MAP[channel], bandwidth, center_freq),
+                ignore_status=True,
+            )
 
-        result = self.ssh.run('iw dev %s info' % iface, ignore_status=True)
-        if result.stderr or 'channel %s' % channel not in result.stdout:
+        result = self.ssh.run("iw dev %s info" % iface, ignore_status=True)
+        if result.stderr or "channel %s" % channel not in result.stdout:
             self.log.error("Failed to configure monitor mode for %s" % band)
             return False
         return True
@@ -269,22 +267,19 @@
             self.log.error("Invalid band or packet capture already running")
             return None
 
-        pcap_name = '%s_%s.pcap' % (pcap_fname, band)
+        pcap_name = "%s_%s.pcap" % (pcap_fname, band)
         pcap_fname = os.path.join(log_path, pcap_name)
-        pcap_file = open(pcap_fname, 'w+b')
+        pcap_file = open(pcap_fname, "w+b")
 
-        tcpdump_cmd = 'tcpdump -i %s -w - -U 2>/dev/null' % (BAND_IFACE[band])
-        cmd = formatter.SshFormatter().format_command(tcpdump_cmd,
-                                                      None,
-                                                      self.ssh_settings,
-                                                      extra_flags={'-q': None})
+        tcpdump_cmd = "tcpdump -i %s -w - -U 2>/dev/null" % (BAND_IFACE[band])
+        cmd = formatter.SshFormatter().format_command(
+            tcpdump_cmd, None, self.ssh_settings, extra_flags={"-q": None}
+        )
         pcap_proc = Process(cmd)
-        pcap_proc.set_on_output_callback(lambda msg: pcap_file.write(msg),
-                                         binary=True)
+        pcap_proc.set_on_output_callback(lambda msg: pcap_file.write(msg), binary=True)
         pcap_proc.start()
 
-        self.pcap_properties[band] = PcapProperties(pcap_proc, pcap_fname,
-                                                    pcap_file)
+        self.pcap_properties[band] = PcapProperties(pcap_proc, pcap_fname, pcap_file)
         return pcap_proc
 
     def stop_packet_capture(self, proc):
diff --git a/src/antlion/controllers/packet_sender.py b/src/antlion/controllers/packet_sender.py
index ddd988c..da22e79 100644
--- a/src/antlion/controllers/packet_sender.py
+++ b/src/antlion/controllers/packet_sender.py
@@ -27,32 +27,32 @@
 # On ubuntu, sudo pip3 install scapy
 import scapy.all as scapy
 
-MOBLY_CONTROLLER_CONFIG_NAME = 'PacketSender'
-ACTS_CONTROLLER_REFERENCE_NAME = 'packet_senders'
+MOBLY_CONTROLLER_CONFIG_NAME = "PacketSender"
+ACTS_CONTROLLER_REFERENCE_NAME = "packet_senders"
 
-GET_FROM_LOCAL_INTERFACE = 'get_local'
-MAC_BROADCAST = 'ff:ff:ff:ff:ff:ff'
-IPV4_BROADCAST = '255.255.255.255'
-ARP_DST = '00:00:00:00:00:00'
-RA_MAC = '33:33:00:00:00:01'
-RA_IP = 'ff02::1'
-RA_PREFIX = 'd00d::'
+GET_FROM_LOCAL_INTERFACE = "get_local"
+MAC_BROADCAST = "ff:ff:ff:ff:ff:ff"
+IPV4_BROADCAST = "255.255.255.255"
+ARP_DST = "00:00:00:00:00:00"
+RA_MAC = "33:33:00:00:00:01"
+RA_IP = "ff02::1"
+RA_PREFIX = "d00d::"
 RA_PREFIX_LEN = 64
 DHCP_OFFER_OP = 2
 DHCP_OFFER_SRC_PORT = 67
 DHCP_OFFER_DST_PORT = 68
 DHCP_TRANS_ID = 0x01020304
 DNS_LEN = 3
-PING6_DATA = 'BEST PING6 EVER'
+PING6_DATA = "BEST PING6 EVER"
 PING4_TYPE = 8
 MDNS_TTL = 255
-MDNS_QTYPE = 'PTR'
+MDNS_QTYPE = "PTR"
 MDNS_UDP_PORT = 5353
-MDNS_V4_IP_DST = '224.0.0.251'
-MDNS_V4_MAC_DST = '01:00:5E:00:00:FB'
+MDNS_V4_IP_DST = "224.0.0.251"
+MDNS_V4_MAC_DST = "01:00:5E:00:00:FB"
 MDNS_RECURSIVE = 1
-MDNS_V6_IP_DST = 'FF02::FB'
-MDNS_V6_MAC_DST = '33:33:00:00:00:FB'
+MDNS_V6_IP_DST = "FF02::FB"
+MDNS_V6_MAC_DST = "33:33:00:00:00:FB"
 ETH_TYPE_IP = 2048
 SAP_SPANNING_TREE = 0x42
 SNAP_OUI = 12
@@ -118,18 +118,18 @@
         self.log = log
 
     def run(self):
-        self.log.info('Packet Sending Started.')
+        self.log.info("Packet Sending Started.")
         while True:
             if self.stop_signal.is_set():
                 # Poison pill means shutdown
-                self.log.info('Packet Sending Stopped.')
+                self.log.info("Packet Sending Stopped.")
                 break
 
             try:
                 scapy.sendp(self.packet, iface=self.interface, verbose=0)
                 time.sleep(self.interval)
             except Exception:
-                self.log.exception('Exception when trying to send packet')
+                self.log.exception("Exception when trying to send packet")
                 return
 
         return
@@ -173,14 +173,15 @@
         """
         if packet is None:
             raise PacketSenderError(
-                'There is no packet to send. Create a packet first.')
+                "There is no packet to send. Create a packet first."
+            )
 
         for _ in range(ntimes):
             try:
                 scapy.sendp(packet, iface=self.interface, verbose=0)
                 time.sleep(interval)
             except socket.error as excpt:
-                self.log.exception('Caught socket exception : %s' % excpt)
+                self.log.exception("Caught socket exception : %s" % excpt)
                 return
 
     def send_receive_ntimes(self, packet, ntimes, interval):
@@ -194,15 +195,15 @@
         """
         if packet is None:
             raise PacketSenderError(
-                'There is no packet to send. Create a packet first.')
+                "There is no packet to send. Create a packet first."
+            )
 
         for _ in range(ntimes):
             try:
-                scapy.srp1(
-                    packet, iface=self.interface, timeout=interval, verbose=0)
+                scapy.srp1(packet, iface=self.interface, timeout=interval, verbose=0)
                 time.sleep(interval)
             except socket.error as excpt:
-                self.log.exception('Caught socket exception : %s' % excpt)
+                self.log.exception("Caught socket exception : %s" % excpt)
                 return
 
     def start_sending(self, packet, interval):
@@ -217,28 +218,32 @@
         """
         if packet is None:
             raise PacketSenderError(
-                'There is no packet to send. Create a packet first.')
+                "There is no packet to send. Create a packet first."
+            )
 
         if self.thread_active:
             raise PacketSenderError(
-                ('There is already an active thread. Stop it'
-                 'before starting another transmission.'))
+                (
+                    "There is already an active thread. Stop it"
+                    "before starting another transmission."
+                )
+            )
 
-        self.thread_send = ThreadSendPacket(self.stop_signal, packet, interval,
-                                            self.interface, self.log)
+        self.thread_send = ThreadSendPacket(
+            self.stop_signal, packet, interval, self.interface, self.log
+        )
         self.thread_send.start()
         self.thread_active = True
 
     def stop_sending(self, ignore_status=False):
-        """Stops the concurrent thread that is continuously sending packets.
-
-       """
+        """Stops the concurrent thread that is continuously sending packets."""
         if not self.thread_active:
             if ignore_status:
                 return
             else:
                 raise PacketSenderError(
-                    'Error: There is no acive thread running to stop.')
+                    "Error: There is no acive thread running to stop."
+                )
 
         # Stop thread
         self.stop_signal.set()
@@ -247,7 +252,7 @@
         # Just as precaution
         if self.thread_send.is_alive():
             self.thread_send.terminate()
-            self.log.warning('Packet Sending forced to terminate')
+            self.log.warning("Packet Sending forced to terminate")
 
         self.stop_signal.clear()
         self.thread_send = None
@@ -276,26 +281,28 @@
               The key can also be 'get_local' which means the code will read
               and use the local interface parameters
         """
-        interf = config_params['interf']
+        interf = config_params["interf"]
         self.packet = None
-        if config_params['src_mac'] == GET_FROM_LOCAL_INTERFACE:
+        if config_params["src_mac"] == GET_FROM_LOCAL_INTERFACE:
             self.src_mac = scapy.get_if_hwaddr(interf)
         else:
-            self.src_mac = config_params['src_mac']
+            self.src_mac = config_params["src_mac"]
 
-        self.dst_ipv4 = config_params['dst_ipv4']
-        if config_params['src_ipv4'] == GET_FROM_LOCAL_INTERFACE:
+        self.dst_ipv4 = config_params["dst_ipv4"]
+        if config_params["src_ipv4"] == GET_FROM_LOCAL_INTERFACE:
             self.src_ipv4 = scapy.get_if_addr(interf)
         else:
-            self.src_ipv4 = config_params['src_ipv4']
+            self.src_ipv4 = config_params["src_ipv4"]
 
-    def generate(self,
-                 op='who-has',
-                 ip_dst=None,
-                 ip_src=None,
-                 hwsrc=None,
-                 hwdst=None,
-                 eth_dst=None):
+    def generate(
+        self,
+        op="who-has",
+        ip_dst=None,
+        ip_src=None,
+        hwsrc=None,
+        hwdst=None,
+        eth_dst=None,
+    ):
         """Generates a custom ARP packet.
 
         Args:
@@ -307,15 +314,14 @@
             eth_dst: Ethernet (layer 2) destination address (Optional)
         """
         # Create IP layer
-        hw_src = (hwsrc if hwsrc is not None else self.src_mac)
-        hw_dst = (hwdst if hwdst is not None else ARP_DST)
-        ipv4_dst = (ip_dst if ip_dst is not None else self.dst_ipv4)
-        ipv4_src = (ip_src if ip_src is not None else self.src_ipv4)
-        ip4 = scapy.ARP(
-            op=op, pdst=ipv4_dst, psrc=ipv4_src, hwdst=hw_dst, hwsrc=hw_src)
+        hw_src = hwsrc if hwsrc is not None else self.src_mac
+        hw_dst = hwdst if hwdst is not None else ARP_DST
+        ipv4_dst = ip_dst if ip_dst is not None else self.dst_ipv4
+        ipv4_src = ip_src if ip_src is not None else self.src_ipv4
+        ip4 = scapy.ARP(op=op, pdst=ipv4_dst, psrc=ipv4_src, hwdst=hw_dst, hwsrc=hw_src)
 
         # Create Ethernet layer
-        mac_dst = (eth_dst if eth_dst is not None else MAC_BROADCAST)
+        mac_dst = eth_dst if eth_dst is not None else MAC_BROADCAST
         ethernet = scapy.Ether(src=self.src_mac, dst=mac_dst)
 
         self.packet = ethernet / ip4
@@ -347,22 +353,22 @@
               The key can also be 'get_local' which means the code will read
               and use the local interface parameters
         """
-        interf = config_params['interf']
+        interf = config_params["interf"]
         self.packet = None
-        self.subnet_mask = config_params['subnet_mask']
-        self.dst_mac = config_params['dst_mac']
-        if config_params['src_mac'] == GET_FROM_LOCAL_INTERFACE:
+        self.subnet_mask = config_params["subnet_mask"]
+        self.dst_mac = config_params["dst_mac"]
+        if config_params["src_mac"] == GET_FROM_LOCAL_INTERFACE:
             self.src_mac = scapy.get_if_hwaddr(interf)
         else:
-            self.src_mac = config_params['src_mac']
+            self.src_mac = config_params["src_mac"]
 
-        self.dst_ipv4 = config_params['dst_ipv4']
-        if config_params['src_ipv4'] == GET_FROM_LOCAL_INTERFACE:
+        self.dst_ipv4 = config_params["dst_ipv4"]
+        if config_params["src_ipv4"] == GET_FROM_LOCAL_INTERFACE:
             self.src_ipv4 = scapy.get_if_addr(interf)
         else:
-            self.src_ipv4 = config_params['src_ipv4']
+            self.src_ipv4 = config_params["src_ipv4"]
 
-        self.gw_ipv4 = config_params['gw_ipv4']
+        self.gw_ipv4 = config_params["gw_ipv4"]
 
     def generate(self, cha_mac=None, dst_ip=None):
         """Generates a DHCP offer packet.
@@ -373,16 +379,18 @@
         """
 
         # Create DHCP layer
-        dhcp = scapy.DHCP(options=[
-            ('message-type', 'offer'),
-            ('subnet_mask', self.subnet_mask),
-            ('server_id', self.src_ipv4),
-            ('end'),
-        ])
+        dhcp = scapy.DHCP(
+            options=[
+                ("message-type", "offer"),
+                ("subnet_mask", self.subnet_mask),
+                ("server_id", self.src_ipv4),
+                ("end"),
+            ]
+        )
 
         # Overwrite standard DHCP fields
-        sta_hw = (cha_mac if cha_mac is not None else self.dst_mac)
-        sta_ip = (dst_ip if dst_ip is not None else self.dst_ipv4)
+        sta_hw = cha_mac if cha_mac is not None else self.dst_mac
+        sta_ip = dst_ip if dst_ip is not None else self.dst_ipv4
 
         # Create Boot
         bootp = scapy.BOOTP(
@@ -391,7 +399,8 @@
             siaddr=self.src_ipv4,
             giaddr=self.gw_ipv4,
             chaddr=scapy.mac2str(sta_hw),
-            xid=DHCP_TRANS_ID)
+            xid=DHCP_TRANS_ID,
+        )
 
         # Create UDP
         udp = scapy.UDP(sport=DHCP_OFFER_SRC_PORT, dport=DHCP_OFFER_DST_PORT)
@@ -429,19 +438,19 @@
               The key can also be 'get_local' which means the code will read
               and use the local interface parameters
         """
-        interf = config_params['interf']
+        interf = config_params["interf"]
         self.packet = None
-        if config_params['src_mac'] == GET_FROM_LOCAL_INTERFACE:
+        if config_params["src_mac"] == GET_FROM_LOCAL_INTERFACE:
             self.src_mac = scapy.get_if_hwaddr(interf)
         else:
-            self.src_mac = config_params['src_mac']
+            self.src_mac = config_params["src_mac"]
 
-        self.dst_ipv6 = config_params['dst_ipv6']
-        self.src_ipv6_type = config_params['src_ipv6_type']
-        if config_params['src_ipv6'] == GET_FROM_LOCAL_INTERFACE:
+        self.dst_ipv6 = config_params["dst_ipv6"]
+        self.src_ipv6_type = config_params["src_ipv6_type"]
+        if config_params["src_ipv6"] == GET_FROM_LOCAL_INTERFACE:
             self.src_ipv6 = get_if_addr6(interf, self.src_ipv6_type)
         else:
-            self.src_ipv6 = config_params['src_ipv6']
+            self.src_ipv6 = config_params["src_ipv6"]
 
     def generate(self, ip_dst=None, eth_dst=None):
         """Generates a Neighbor Solicitation (NS) packet (ICMP over IPv6).
@@ -456,8 +465,7 @@
         nnode_mcast = scapy.in6_getnsma(ndst_ip)
         node_mcast = socket.inet_ntop(socket.AF_INET6, nnode_mcast)
         # Compute MAC addresses
-        hw_dst = (eth_dst
-                  if eth_dst is not None else scapy.in6_getnsmac(nnode_mcast))
+        hw_dst = eth_dst if eth_dst is not None else scapy.in6_getnsmac(nnode_mcast)
 
         # Create IPv6 layer
         base = scapy.IPv6(dst=node_mcast, src=self.src_ipv6)
@@ -494,25 +502,22 @@
               The key can also be 'get_local' which means the code will read
               and use the local interface parameters
         """
-        interf = config_params['interf']
+        interf = config_params["interf"]
         self.packet = None
-        if config_params['src_mac'] == GET_FROM_LOCAL_INTERFACE:
+        if config_params["src_mac"] == GET_FROM_LOCAL_INTERFACE:
             self.src_mac = scapy.get_if_hwaddr(interf)
         else:
-            self.src_mac = config_params['src_mac']
+            self.src_mac = config_params["src_mac"]
 
-        self.src_ipv6_type = config_params['src_ipv6_type']
-        if config_params['src_ipv6'] == GET_FROM_LOCAL_INTERFACE:
+        self.src_ipv6_type = config_params["src_ipv6_type"]
+        if config_params["src_ipv6"] == GET_FROM_LOCAL_INTERFACE:
             self.src_ipv6 = get_if_addr6(interf, self.src_ipv6_type)
         else:
-            self.src_ipv6 = config_params['src_ipv6']
+            self.src_ipv6 = config_params["src_ipv6"]
 
-    def generate(self,
-                 lifetime,
-                 enableDNS=False,
-                 dns_lifetime=0,
-                 ip_dst=None,
-                 eth_dst=None):
+    def generate(
+        self, lifetime, enableDNS=False, dns_lifetime=0, ip_dst=None, eth_dst=None
+    ):
         """Generates a Router Advertisement (RA) packet (ICMP over IPv6).
 
         Args:
@@ -523,18 +528,18 @@
             eth_dst: Ethernet (layer 2) destination address (Optional)
         """
         # Overwrite standard fields if desired
-        ip6_dst = (ip_dst if ip_dst is not None else RA_IP)
-        hw_dst = (eth_dst if eth_dst is not None else RA_MAC)
+        ip6_dst = ip_dst if ip_dst is not None else RA_IP
+        hw_dst = eth_dst if eth_dst is not None else RA_MAC
 
         # Create IPv6 layer
         base = scapy.IPv6(dst=ip6_dst, src=self.src_ipv6)
         router_solicitation = scapy.ICMPv6ND_RA(routerlifetime=lifetime)
         src_ll_addr = scapy.ICMPv6NDOptSrcLLAddr(lladdr=self.src_mac)
-        prefix = scapy.ICMPv6NDOptPrefixInfo(
-            prefixlen=RA_PREFIX_LEN, prefix=RA_PREFIX)
+        prefix = scapy.ICMPv6NDOptPrefixInfo(prefixlen=RA_PREFIX_LEN, prefix=RA_PREFIX)
         if enableDNS:
             rndss = scapy.ICMPv6NDOptRDNSS(
-                lifetime=dns_lifetime, dns=[self.src_ipv6], len=DNS_LEN)
+                lifetime=dns_lifetime, dns=[self.src_ipv6], len=DNS_LEN
+            )
             ip6 = base / router_solicitation / src_ll_addr / prefix / rndss
         else:
             ip6 = base / router_solicitation / src_ll_addr / prefix
@@ -570,20 +575,20 @@
               The key can also be 'get_local' which means the code will read
               and use the local interface parameters
         """
-        interf = config_params['interf']
+        interf = config_params["interf"]
         self.packet = None
-        self.dst_mac = config_params['dst_mac']
-        if config_params['src_mac'] == GET_FROM_LOCAL_INTERFACE:
+        self.dst_mac = config_params["dst_mac"]
+        if config_params["src_mac"] == GET_FROM_LOCAL_INTERFACE:
             self.src_mac = scapy.get_if_hwaddr(interf)
         else:
-            self.src_mac = config_params['src_mac']
+            self.src_mac = config_params["src_mac"]
 
-        self.dst_ipv6 = config_params['dst_ipv6']
-        self.src_ipv6_type = config_params['src_ipv6_type']
-        if config_params['src_ipv6'] == GET_FROM_LOCAL_INTERFACE:
+        self.dst_ipv6 = config_params["dst_ipv6"]
+        self.src_ipv6_type = config_params["src_ipv6_type"]
+        if config_params["src_ipv6"] == GET_FROM_LOCAL_INTERFACE:
             self.src_ipv6 = get_if_addr6(interf, self.src_ipv6_type)
         else:
-            self.src_ipv6 = config_params['src_ipv6']
+            self.src_ipv6 = config_params["src_ipv6"]
 
     def generate(self, ip_dst=None, eth_dst=None):
         """Generates a Ping6 packet (i.e., Echo Request)
@@ -593,8 +598,8 @@
             eth_dst: Ethernet (layer 2) destination address (Optional)
         """
         # Overwrite standard fields if desired
-        ip6_dst = (ip_dst if ip_dst is not None else self.dst_ipv6)
-        hw_dst = (eth_dst if eth_dst is not None else self.dst_mac)
+        ip6_dst = ip_dst if ip_dst is not None else self.dst_ipv6
+        hw_dst = eth_dst if eth_dst is not None else self.dst_mac
 
         # Create IPv6 layer
         base = scapy.IPv6(dst=ip6_dst, src=self.src_ipv6)
@@ -632,19 +637,19 @@
               The key can also be 'get_local' which means the code will read
               and use the local interface parameters
         """
-        interf = config_params['interf']
+        interf = config_params["interf"]
         self.packet = None
-        self.dst_mac = config_params['dst_mac']
-        if config_params['src_mac'] == GET_FROM_LOCAL_INTERFACE:
+        self.dst_mac = config_params["dst_mac"]
+        if config_params["src_mac"] == GET_FROM_LOCAL_INTERFACE:
             self.src_mac = scapy.get_if_hwaddr(interf)
         else:
-            self.src_mac = config_params['src_mac']
+            self.src_mac = config_params["src_mac"]
 
-        self.dst_ipv4 = config_params['dst_ipv4']
-        if config_params['src_ipv4'] == GET_FROM_LOCAL_INTERFACE:
+        self.dst_ipv4 = config_params["dst_ipv4"]
+        if config_params["src_ipv4"] == GET_FROM_LOCAL_INTERFACE:
             self.src_ipv4 = scapy.get_if_addr(interf)
         else:
-            self.src_ipv4 = config_params['src_ipv4']
+            self.src_ipv4 = config_params["src_ipv4"]
 
     def generate(self, ip_dst=None, eth_dst=None):
         """Generates a Ping4 packet (i.e., Echo Request)
@@ -655,8 +660,8 @@
         """
 
         # Overwrite standard fields if desired
-        sta_ip = (ip_dst if ip_dst is not None else self.dst_ipv4)
-        sta_hw = (eth_dst if eth_dst is not None else self.dst_mac)
+        sta_ip = ip_dst if ip_dst is not None else self.dst_ipv4
+        sta_hw = eth_dst if eth_dst is not None else self.dst_mac
 
         # Create IPv6 layer
         base = scapy.IP(src=self.src_ipv4, dst=sta_ip)
@@ -693,18 +698,18 @@
               The key can also be 'get_local' which means the code will read
               and use the local interface parameters
         """
-        interf = config_params['interf']
+        interf = config_params["interf"]
         self.packet = None
-        if config_params['src_mac'] == GET_FROM_LOCAL_INTERFACE:
+        if config_params["src_mac"] == GET_FROM_LOCAL_INTERFACE:
             self.src_mac = scapy.get_if_hwaddr(interf)
         else:
-            self.src_mac = config_params['src_mac']
+            self.src_mac = config_params["src_mac"]
 
-        self.src_ipv6_type = config_params['src_ipv6_type']
-        if config_params['src_ipv6'] == GET_FROM_LOCAL_INTERFACE:
+        self.src_ipv6_type = config_params["src_ipv6_type"]
+        if config_params["src_ipv6"] == GET_FROM_LOCAL_INTERFACE:
             self.src_ipv6 = get_if_addr6(interf, self.src_ipv6_type)
         else:
-            self.src_ipv6 = config_params['src_ipv6']
+            self.src_ipv6 = config_params["src_ipv6"]
 
     def generate(self, ip_dst=None, eth_dst=None):
         """Generates a mDNS v6 packet for multicast DNS config
@@ -715,8 +720,8 @@
         """
 
         # Overwrite standard fields if desired
-        sta_ip = (ip_dst if ip_dst is not None else MDNS_V6_IP_DST)
-        sta_hw = (eth_dst if eth_dst is not None else MDNS_V6_MAC_DST)
+        sta_ip = ip_dst if ip_dst is not None else MDNS_V6_IP_DST
+        sta_hw = eth_dst if eth_dst is not None else MDNS_V6_MAC_DST
 
         # Create mDNS layer
         qdServer = scapy.DNSQR(qname=self.src_ipv6, qtype=MDNS_QTYPE)
@@ -756,17 +761,17 @@
               The key can also be 'get_local' which means the code will read
               and use the local interface parameters
         """
-        interf = config_params['interf']
+        interf = config_params["interf"]
         self.packet = None
-        if config_params['src_mac'] == GET_FROM_LOCAL_INTERFACE:
+        if config_params["src_mac"] == GET_FROM_LOCAL_INTERFACE:
             self.src_mac = scapy.get_if_hwaddr(interf)
         else:
-            self.src_mac = config_params['src_mac']
+            self.src_mac = config_params["src_mac"]
 
-        if config_params['src_ipv4'] == GET_FROM_LOCAL_INTERFACE:
+        if config_params["src_ipv4"] == GET_FROM_LOCAL_INTERFACE:
             self.src_ipv4 = scapy.get_if_addr(interf)
         else:
-            self.src_ipv4 = config_params['src_ipv4']
+            self.src_ipv4 = config_params["src_ipv4"]
 
     def generate(self, ip_dst=None, eth_dst=None):
         """Generates a mDNS v4 packet for multicast DNS config
@@ -777,8 +782,8 @@
         """
 
         # Overwrite standard fields if desired
-        sta_ip = (ip_dst if ip_dst is not None else MDNS_V4_IP_DST)
-        sta_hw = (eth_dst if eth_dst is not None else MDNS_V4_MAC_DST)
+        sta_ip = ip_dst if ip_dst is not None else MDNS_V4_IP_DST
+        sta_hw = eth_dst if eth_dst is not None else MDNS_V4_MAC_DST
 
         # Create mDNS layer
         qdServer = scapy.DNSQR(qname=self.src_ipv4, qtype=MDNS_QTYPE)
@@ -818,13 +823,13 @@
               The key can also be 'get_local' which means the code will read
               and use the local interface parameters
         """
-        interf = config_params['interf']
+        interf = config_params["interf"]
         self.packet = None
-        self.dst_mac = config_params['dst_mac']
-        if config_params['src_mac'] == GET_FROM_LOCAL_INTERFACE:
+        self.dst_mac = config_params["dst_mac"]
+        if config_params["src_mac"] == GET_FROM_LOCAL_INTERFACE:
             self.src_mac = scapy.get_if_hwaddr(interf)
         else:
-            self.src_mac = config_params['src_mac']
+            self.src_mac = config_params["src_mac"]
 
     def _build_ether(self, eth_dst=None):
         """Creates the basic frame for 802.3
@@ -833,7 +838,7 @@
             eth_dst: Ethernet (layer 2) destination address (Optional)
         """
         # Overwrite standard fields if desired
-        sta_hw = (eth_dst if eth_dst is not None else self.dst_mac)
+        sta_hw = eth_dst if eth_dst is not None else self.dst_mac
         # Create Ethernet layer
         dot3_base = scapy.Dot3(src=self.src_mac, dst=sta_hw)
 
@@ -847,7 +852,7 @@
         """
         frame.len = PAD_LEN_BYTES
         pad = scapy.Padding()
-        pad.load = '\x00' * PAD_LEN_BYTES
+        pad.load = "\x00" * PAD_LEN_BYTES
         return frame / pad
 
     def generate(self, eth_dst=None):
@@ -881,13 +886,15 @@
         self.packet = self._pad_frame(ethernet / llc)
         return self.packet
 
-    def generate_snap(self,
-                      eth_dst=None,
-                      dsap=SNAP_DSAP,
-                      ssap=SNAP_SSAP,
-                      ctrl=SNAP_CTRL,
-                      oui=SNAP_OUI,
-                      code=ETH_TYPE_IP):
+    def generate_snap(
+        self,
+        eth_dst=None,
+        dsap=SNAP_DSAP,
+        ssap=SNAP_SSAP,
+        ctrl=SNAP_CTRL,
+        oui=SNAP_OUI,
+        code=ETH_TYPE_IP,
+    ):
         """Generates the 802.3 frame with LLC and SNAP and adds padding
 
         Args:
@@ -932,4 +939,3 @@
             return if_list[0]
 
     return None
-
diff --git a/src/antlion/controllers/pdu.py b/src/antlion/controllers/pdu.py
index d167afa..412742e 100644
--- a/src/antlion/controllers/pdu.py
+++ b/src/antlion/controllers/pdu.py
@@ -19,8 +19,8 @@
 
 from antlion import tracelogger
 
-MOBLY_CONTROLLER_CONFIG_NAME = 'PduDevice'
-ACTS_CONTROLLER_REFERENCE_NAME = 'pdu_devices'
+MOBLY_CONTROLLER_CONFIG_NAME = "PduDevice"
+ACTS_CONTROLLER_REFERENCE_NAME = "pdu_devices"
 
 
 def create(configs):
@@ -40,15 +40,15 @@
     if configs:
         pdus = []
         for config in configs:
-            device = config.get('device')
+            device = config.get("device")
             if not device:
                 raise PduError("Config must provide a device")
 
-            host = config.get('host')
+            host = config.get("host")
             if not device:
                 raise PduError("Config must provide a host ip address")
-            username = config.get('username')
-            password = config.get('password')
+            username = config.get("username")
+            password = config.get("password")
             pdu = _create_device(device, host, username, password)
             pdus.append(pdu)
         return pdus
@@ -77,11 +77,9 @@
     """
     info = []
     for pdu in pdu_list:
-        info.append({
-            'host': pdu.host,
-            'username': pdu.username,
-            'password': pdu.password
-        })
+        info.append(
+            {"host": pdu.host, "username": pdu.username, "password": pdu.password}
+        )
     return info
 
 
@@ -89,7 +87,7 @@
     """Factory method that returns an instance of PduDevice implementation
     based on the device string.
     """
-    module_name = 'antlion.controllers.pdu_lib.' + device
+    module_name = "antlion.controllers.pdu_lib." + device
     module = importlib.import_module(module_name)
     return module.PduDevice(host, username, password)
 
@@ -143,12 +141,12 @@
         ],
         ...
     """
-    pdu_ip = device_pdu_config['host']
-    port = device_pdu_config['port']
+    pdu_ip = device_pdu_config["host"]
+    port = device_pdu_config["port"]
     for pdu in pdus:
         if pdu.host == pdu_ip:
             return pdu, port
-    raise ValueError('No PduDevice with host: %s' % pdu_ip)
+    raise ValueError("No PduDevice with host: %s" % pdu_ip)
 
 
 class PduDevice(object):
@@ -163,8 +161,7 @@
 
     def __init__(self, host, username, password):
         if type(self) is PduDevice:
-            raise NotImplementedError(
-                "Base class: cannot be instantiated directly")
+            raise NotImplementedError("Base class: cannot be instantiated directly")
         self.host = host
         self.username = username
         self.password = password
diff --git a/src/antlion/controllers/pdu_lib/digital_loggers/webpowerswitch.py b/src/antlion/controllers/pdu_lib/digital_loggers/webpowerswitch.py
index 8a4c6d4..1154f95 100644
--- a/src/antlion/controllers/pdu_lib/digital_loggers/webpowerswitch.py
+++ b/src/antlion/controllers/pdu_lib/digital_loggers/webpowerswitch.py
@@ -24,6 +24,7 @@
 # TODO(b/246999212): Explore alternatives to the dlipower package
 try:
     import dlipower
+
     HAS_IMPORT_DLIPOWER = True
 except ImportError:
     HAS_IMPORT_DLIPOWER = False
@@ -51,32 +52,34 @@
 
         if not HAS_IMPORT_DLIPOWER:
             raise signals.ControllerError(
-                'Digital Loggers PDUs are not supported with current installed '
-                'packages; install the dlipower package to add support')
+                "Digital Loggers PDUs are not supported with current installed "
+                "packages; install the dlipower package to add support"
+            )
 
-        self.power_switch = dlipower.PowerSwitch(hostname=host,
-                                                 userid=username,
-                                                 password=password)
+        self.power_switch = dlipower.PowerSwitch(
+            hostname=host, userid=username, password=password
+        )
         # Connection is made at command execution, this verifies the device
         # can be reached before continuing.
         if not self.power_switch.statuslist():
             raise pdu.PduError(
-                'Failed to connect get WebPowerSwitch status. Incorrect host, '
-                'userid, or password?')
+                "Failed to connect get WebPowerSwitch status. Incorrect host, "
+                "userid, or password?"
+            )
         else:
-            self.log.info('Connected to WebPowerSwitch (%s).' % host)
+            self.log.info("Connected to WebPowerSwitch (%s)." % host)
 
     def on_all(self):
         """Turn on power to all outlets."""
         for outlet in self.power_switch:
             outlet.on()
-            self._verify_state(outlet.name, 'ON')
+            self._verify_state(outlet.name, "ON")
 
     def off_all(self):
         """Turn off power to all outlets."""
         for outlet in self.power_switch:
             outlet.off()
-            self._verify_state(outlet.name, 'OFF')
+            self._verify_state(outlet.name, "OFF")
 
     def on(self, outlet):
         """Turn on power to given outlet
@@ -84,8 +87,8 @@
         Args:
             outlet: string or int, the outlet name/number
         """
-        self.power_switch.command_on_outlets('on', str(outlet))
-        self._verify_state(outlet, 'ON')
+        self.power_switch.command_on_outlets("on", str(outlet))
+        self._verify_state(outlet, "ON")
 
     def off(self, outlet):
         """Turn off power to given outlet
@@ -93,8 +96,8 @@
         Args:
             outlet: string or int, the outlet name/number
         """
-        self.power_switch.command_on_outlets('off', str(outlet))
-        self._verify_state(outlet, 'OFF')
+        self.power_switch.command_on_outlets("off", str(outlet))
+        self._verify_state(outlet, "OFF")
 
     def reboot(self, outlet):
         """Cycle the given outlet to OFF and back ON.
@@ -102,11 +105,11 @@
         Args:
             outlet: string or int, the outlet name/number
         """
-        self.power_switch.command_on_outlets('cycle', str(outlet))
-        self._verify_state(outlet, 'ON')
+        self.power_switch.command_on_outlets("cycle", str(outlet))
+        self._verify_state(outlet, "ON")
 
     def status(self):
-        """ Return the status of the switch outlets.
+        """Return the status of the switch outlets.
 
         Return:
             a dict mapping outlet string numbers to:
@@ -114,7 +117,7 @@
                 False if outlet is OFF
         """
         status_list = self.power_switch.statuslist()
-        return {str(outlet): state == 'ON' for outlet, _, state in status_list}
+        return {str(outlet): state == "ON" for outlet, _, state in status_list}
 
     def close(self):
         # Since there isn't a long-running connection, close is not applicable.
@@ -141,10 +144,11 @@
             if actual_state == expected_state:
                 return
             else:
-                self.log.debug('Outlet %s not yet in state %s' %
-                               (outlet, expected_state))
+                self.log.debug(
+                    "Outlet %s not yet in state %s" % (outlet, expected_state)
+                )
         raise pdu.PduError(
-            'Outlet %s on WebPowerSwitch (%s) failed to reach expected state. \n'
-            'Expected State: %s\n'
-            'Actual State: %s' %
-            (outlet, self.host, expected_state, actual_state))
+            "Outlet %s on WebPowerSwitch (%s) failed to reach expected state. \n"
+            "Expected State: %s\n"
+            "Actual State: %s" % (outlet, self.host, expected_state, actual_state)
+        )
diff --git a/src/antlion/controllers/pdu_lib/synaccess/np02b.py b/src/antlion/controllers/pdu_lib/synaccess/np02b.py
index ccc7e74..70624f0 100644
--- a/src/antlion/controllers/pdu_lib/synaccess/np02b.py
+++ b/src/antlion/controllers/pdu_lib/synaccess/np02b.py
@@ -26,40 +26,41 @@
     """Implementation of pure abstract PduDevice object for the Synaccess np02b
     Pdu.
     """
+
     def __init__(self, host, username, password):
         super(PduDevice, self).__init__(host, username, password)
         self.tnhelper = _TNHelperNP02B(host)
 
     def on_all(self):
-        """ Turns on both outlets on the np02b."""
-        self.tnhelper.cmd('ps 1')
-        self._verify_state({'1': True, '2': True})
+        """Turns on both outlets on the np02b."""
+        self.tnhelper.cmd("ps 1")
+        self._verify_state({"1": True, "2": True})
 
     def off_all(self):
-        """ Turns off both outlets on the np02b."""
-        self.tnhelper.cmd('ps 0')
-        self._verify_state({'1': False, '2': False})
+        """Turns off both outlets on the np02b."""
+        self.tnhelper.cmd("ps 0")
+        self._verify_state({"1": False, "2": False})
 
     def on(self, outlet):
-        """ Turns on specific outlet on the np02b.
+        """Turns on specific outlet on the np02b.
 
         Args:
             outlet: string of the outlet to turn on ('1' or '2')
         """
-        self.tnhelper.cmd('pset %s 1' % outlet)
+        self.tnhelper.cmd("pset %s 1" % outlet)
         self._verify_state({outlet: True})
 
     def off(self, outlet):
-        """ Turns off a specifc outlet on the np02b.
+        """Turns off a specific outlet on the np02b.
 
         Args:
             outlet: string of the outlet to turn off ('1' or '2')
         """
-        self.tnhelper.cmd('pset %s 0' % outlet)
+        self.tnhelper.cmd("pset %s 0" % outlet)
         self._verify_state({outlet: False})
 
     def reboot(self, outlet):
-        """ Toggles a specific outlet on the np02b to off, then to on.
+        """Toggles a specific outlet on the np02b to off, then to on.
 
         Args:
             outlet: string of the outlet to reboot ('1' or '2')
@@ -70,18 +71,18 @@
         self._verify_state({outlet: True})
 
     def status(self):
-        """ Returns the status of the np02b outlets.
+        """Returns the status of the np02b outlets.
 
         Return:
             a dict mapping outlet strings ('1' and '2') to:
                 True if outlet is ON
                 False if outlet is OFF
         """
-        res = self.tnhelper.cmd('pshow')
-        status_list = re.findall('(ON|OFF)', res)
+        res = self.tnhelper.cmd("pshow")
+        status_list = re.findall("(ON|OFF)", res)
         status_dict = {}
         for i, status in enumerate(status_list):
-            status_dict[str(i + 1)] = (status == 'ON')
+            status_dict[str(i + 1)] = status == "ON"
         return status_dict
 
     def close(self):
@@ -118,10 +119,12 @@
             actual_state = self.status()
             if expected_state.items() <= actual_state.items():
                 return True
-            time.sleep(.1)
-        raise pdu.PduError('Timeout while verifying state.\n'
-                           'Expected State: %s\n'
-                           'Actual State: %s' % (expected_state, actual_state))
+            time.sleep(0.1)
+        raise pdu.PduError(
+            "Timeout while verifying state.\n"
+            "Expected State: %s\n"
+            "Actual State: %s" % (expected_state, actual_state)
+        )
 
 
 class _TNHelperNP02B(object):
@@ -129,12 +132,13 @@
     helper is specific to the idiosyncrasies of the NP02B and therefore should
     not be used with other devices.
     """
+
     def __init__(self, host):
         self._tn = telnetlib.Telnet()
         self.host = host
-        self.tx_cmd_separator = '\n\r'
-        self.rx_cmd_separator = '\r\n'
-        self.prompt = '>'
+        self.tx_cmd_separator = "\n\r"
+        self.rx_cmd_separator = "\r\n"
+        self.prompt = ">"
 
     """
     Executes a command on the device via telnet.
@@ -149,14 +153,13 @@
         try:
             self._tn.open(self.host, timeout=3)
         except:
-            raise pdu.PduError("Failed to open telnet session to host (%s)" %
-                               self.host)
-        time.sleep(.1)
+            raise pdu.PduError("Failed to open telnet session to host (%s)" % self.host)
+        time.sleep(0.1)
 
         # Read to end of first prompt
         cmd_str.strip(self.tx_cmd_separator)
         self._tn.read_eager()
-        time.sleep(.1)
+        time.sleep(0.1)
 
         # Write command and read all output text
         self._tn.write(utils.ascii_string(cmd_str + self.tx_cmd_separator))
@@ -166,12 +169,12 @@
         if res is None:
             raise pdu.PduError("Command failed: %s" % cmd_str)
         res = res.decode()
-        if re.search('Invalid', res):
+        if re.search("Invalid", res):
             raise pdu.PduError("Command Invalid: %s" % cmd_str)
-        res = res.replace(self.prompt, '')
-        res = res.replace(self.tx_cmd_separator, '')
-        res = res.replace(self.rx_cmd_separator, '')
-        res = res.replace(cmd_str, '')
+        res = res.replace(self.prompt, "")
+        res = res.replace(self.tx_cmd_separator, "")
+        res = res.replace(self.rx_cmd_separator, "")
+        res = res.replace(cmd_str, "")
 
         # Close session
         self._tn.close()
diff --git a/src/antlion/controllers/power_metrics.py b/src/antlion/controllers/power_metrics.py
deleted file mode 100644
index 0c306ce..0000000
--- a/src/antlion/controllers/power_metrics.py
+++ /dev/null
@@ -1,339 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import math
-import numpy as np
-
-# Metrics timestamp keys
-START_TIMESTAMP = 'start'
-END_TIMESTAMP = 'end'
-
-# Unit type constants
-CURRENT = 'current'
-POWER = 'power'
-TIME = 'time'
-VOLTAGE = 'voltage'
-
-# Unit constants
-MILLIVOLT = 'mV'
-VOLT = 'V'
-MILLIAMP = 'mA'
-AMP = 'A'
-AMPERE = AMP
-MILLIWATT = 'mW'
-WATT = 'W'
-MILLISECOND = 'ms'
-SECOND = 's'
-MINUTE = 'm'
-HOUR = 'h'
-
-CONVERSION_TABLES = {
-    CURRENT: {
-        MILLIAMP: 0.001,
-        AMP: 1
-    },
-    POWER: {
-        MILLIWATT: 0.001,
-        WATT: 1
-    },
-    TIME: {
-        MILLISECOND: 0.001,
-        SECOND: 1,
-        MINUTE: 60,
-        HOUR: 3600
-    },
-    VOLTAGE: {
-        MILLIVOLT: 0.001,
-        VOLT : 1
-    }
-}
-
-
-class Metric(object):
-    """Base class for describing power measurement values. Each object contains
-    an value and a unit. Enables some basic arithmetic operations with other
-    measurements of the same unit type.
-
-    Attributes:
-        value: Numeric value of the measurement
-        _unit_type: Unit type of the measurement (e.g. current, power)
-        unit: Unit of the measurement (e.g. W, mA)
-    """
-
-    def __init__(self, value, unit_type, unit, name=None):
-        if unit_type not in CONVERSION_TABLES:
-            raise TypeError(
-                '%s is not a valid unit type, valid unit types are %s' % (
-                    unit_type, str(CONVERSION_TABLES.keys)))
-        self.value = value
-        self.unit = unit
-        self.name = name
-        self._unit_type = unit_type
-
-    # Convenience constructor methods
-    @staticmethod
-    def amps(amps, name=None):
-        """Create a new current measurement, in amps."""
-        return Metric(amps, CURRENT, AMP, name=name)
-
-    @staticmethod
-    def watts(watts, name=None):
-        """Create a new power measurement, in watts."""
-        return Metric(watts, POWER, WATT, name=name)
-
-    @staticmethod
-    def seconds(seconds, name=None):
-        """Create a new time measurement, in seconds."""
-        return Metric(seconds, TIME, SECOND, name=name)
-
-    # Comparison methods
-
-    def __eq__(self, other):
-        return self.value == other.to_unit(self.unit).value
-
-    def __lt__(self, other):
-        return self.value < other.to_unit(self.unit).value
-
-    def __le__(self, other):
-        return self == other or self < other
-
-    # Addition and subtraction with other measurements
-
-    def __add__(self, other):
-        """Adds measurements of compatible unit types. The result will be in the
-        same units as self.
-        """
-        return Metric(self.value + other.to_unit(self.unit).value,
-                      self._unit_type, self.unit, name=self.name)
-
-    def __sub__(self, other):
-        """Subtracts measurements of compatible unit types. The result will be
-        in the same units as self.
-        """
-        return Metric(self.value - other.to_unit(self.unit).value,
-                      self._unit_type, self.unit, name=self.name)
-
-    # String representation
-
-    def __str__(self):
-        return '%g%s' % (self.value, self.unit)
-
-    def __repr__(self):
-        return str(self)
-
-    def to_unit(self, new_unit):
-        """Create an equivalent measurement under a different unit.
-        e.g. 0.5W -> 500mW
-
-        Args:
-            new_unit: Target unit. Must be compatible with current unit.
-
-        Returns: A new measurement with the converted value and unit.
-        """
-        try:
-            new_value = self.value * (
-                CONVERSION_TABLES[self._unit_type][self.unit] /
-                CONVERSION_TABLES[self._unit_type][new_unit])
-        except KeyError:
-            raise TypeError('Incompatible units: %s, %s' %
-                            (self.unit, new_unit))
-        return Metric(new_value, self._unit_type, new_unit, self.name)
-
-
-def import_raw_data(path):
-    """Create a generator from a Monsoon data file.
-
-    Args:
-        path: path to raw data file
-
-    Returns: generator that yields (timestamp, sample) per line
-    """
-    with open(path, 'r') as f:
-        for line in f:
-            time, sample = line.split()
-            yield float(time[:-1]), float(sample)
-
-
-def generate_percentiles(monsoon_file, timestamps, percentiles):
-    """Generates metrics .
-
-    Args:
-        monsoon_file: monsoon-like file where each line has two
-            numbers separated by a space, in the format:
-            seconds_since_epoch amperes
-            seconds_since_epoch amperes
-        timestamps: dict following the output format of
-            instrumentation_proto_parser.get_test_timestamps()
-        percentiles: percentiles to be returned
-    """
-    if timestamps is None:
-        timestamps = {}
-    test_starts = {}
-    test_ends = {}
-    for seg_name, times in timestamps.items():
-        if START_TIMESTAMP in times and END_TIMESTAMP in times:
-            test_starts[seg_name] = Metric(
-                times[START_TIMESTAMP], TIME, MILLISECOND).to_unit(
-                SECOND).value
-            test_ends[seg_name] = Metric(
-                times[END_TIMESTAMP], TIME, MILLISECOND).to_unit(
-                SECOND).value
-
-    arrays = {}
-    for seg_name in test_starts:
-        arrays[seg_name] = []
-
-    with open(monsoon_file, 'r') as m:
-        for line in m:
-            timestamp = float(line.strip().split()[0])
-            value = float(line.strip().split()[1])
-            for seg_name in arrays.keys():
-                if test_starts[seg_name] <= timestamp <= test_ends[seg_name]:
-                    arrays[seg_name].append(value)
-
-    results = {}
-    for seg_name in arrays:
-        if len(arrays[seg_name]) == 0:
-            continue
-
-        pairs = zip(percentiles, np.percentile(arrays[seg_name],
-                                               percentiles))
-        results[seg_name] = [
-            Metric.amps(p[1], 'percentile_%s' % p[0]).to_unit(MILLIAMP) for p in
-            pairs
-        ]
-    return results
-
-
-def generate_test_metrics(raw_data, timestamps=None,
-                          voltage=None):
-    """Split the data into individual test metrics, based on the timestamps
-    given as a dict.
-
-    Args:
-        raw_data: raw data as list or generator of (timestamp, sample)
-        timestamps: dict following the output format of
-            instrumentation_proto_parser.get_test_timestamps()
-        voltage: voltage used during measurements
-    """
-
-    # Initialize metrics for each test
-    if timestamps is None:
-        timestamps = {}
-    test_starts = {}
-    test_ends = {}
-    test_metrics = {}
-    for seg_name, times in timestamps.items():
-        if START_TIMESTAMP in times and END_TIMESTAMP in times:
-            test_metrics[seg_name] = PowerMetrics(voltage)
-            test_starts[seg_name] = Metric(
-                times[START_TIMESTAMP], TIME, MILLISECOND).to_unit(
-                SECOND).value
-            test_ends[seg_name] = Metric(
-                times[END_TIMESTAMP], TIME, MILLISECOND).to_unit(
-                SECOND).value
-
-    # Assign data to tests based on timestamps
-    for timestamp, amps in raw_data:
-        for seg_name in test_metrics.keys():
-            if test_starts[seg_name] <= timestamp <= test_ends[seg_name]:
-                test_metrics[seg_name].update_metrics(amps)
-
-    result = {}
-    for seg_name, power_metrics in test_metrics.items():
-        result[seg_name] = [
-            power_metrics.avg_current,
-            power_metrics.max_current,
-            power_metrics.min_current,
-            power_metrics.stdev_current,
-            power_metrics.avg_power]
-    return result
-
-
-class PowerMetrics(object):
-    """Class for processing raw power metrics generated by Monsoon measurements.
-    Provides useful metrics such as average current, max current, and average
-    power. Can generate individual test metrics.
-
-    See section "Numeric metrics" below for available metrics.
-    """
-
-    def __init__(self, voltage):
-        """Create a PowerMetrics.
-
-        Args:
-            voltage: Voltage of the measurement
-        """
-        self._voltage = voltage
-        self._num_samples = 0
-        self._sum_currents = 0
-        self._sum_squares = 0
-        self._max_current = None
-        self._min_current = None
-        self.test_metrics = {}
-
-    def update_metrics(self, sample):
-        """Update the running metrics with the current sample.
-
-        Args:
-            sample: A current sample in Amps.
-        """
-        self._num_samples += 1
-        self._sum_currents += sample
-        self._sum_squares += sample ** 2
-        if self._max_current is None or sample > self._max_current:
-            self._max_current = sample
-        if self._min_current is None or sample < self._min_current:
-            self._min_current = sample
-
-    # Numeric metrics
-    @property
-    def avg_current(self):
-        """Average current, in milliamps."""
-        if not self._num_samples:
-            return Metric.amps(0).to_unit(MILLIAMP)
-        return (Metric.amps(self._sum_currents / self._num_samples,
-                            'avg_current')
-                .to_unit(MILLIAMP))
-
-    @property
-    def max_current(self):
-        """Max current, in milliamps."""
-        return Metric.amps(self._max_current or 0, 'max_current').to_unit(
-            MILLIAMP)
-
-    @property
-    def min_current(self):
-        """Min current, in milliamps."""
-        return Metric.amps(self._min_current or 0, 'min_current').to_unit(
-            MILLIAMP)
-
-    @property
-    def stdev_current(self):
-        """Standard deviation of current values, in milliamps."""
-        if self._num_samples < 2:
-            return Metric.amps(0, 'stdev_current').to_unit(MILLIAMP)
-        stdev = math.sqrt(
-            (self._sum_squares - (
-                self._num_samples * self.avg_current.to_unit(AMP).value ** 2))
-            / (self._num_samples - 1))
-        return Metric.amps(stdev, 'stdev_current').to_unit(MILLIAMP)
-
-    @property
-    def avg_power(self):
-        """Average power, in milliwatts."""
-        return Metric.watts(self.avg_current.to_unit(AMP).value * self._voltage,
-                            'avg_power').to_unit(MILLIWATT)
diff --git a/src/antlion/controllers/power_monitor.py b/src/antlion/controllers/power_monitor.py
deleted file mode 100644
index 6a229f7..0000000
--- a/src/antlion/controllers/power_monitor.py
+++ /dev/null
@@ -1,215 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-import tempfile
-
-from antlion.controllers import power_metrics
-from antlion.controllers.monsoon_lib.api.common import MonsoonError
-
-
-class ResourcesRegistryError(Exception):
-    pass
-
-
-_REGISTRY = {}
-
-
-def update_registry(registry):
-    """Updates the registry with the one passed.
-
-    Overriding a previous value is not allowed.
-
-    Args:
-        registry: A dictionary.
-    Raises:
-        ResourceRegistryError if a property is updated with a different value.
-    """
-    for k, v in registry.items():
-        if k in _REGISTRY:
-            if v == _REGISTRY[k]:
-                continue
-            raise ResourcesRegistryError(
-                'Overwriting resources_registry fields is not allowed. %s was '
-                'already defined as %s and was attempted to be overwritten '
-                'with %s.' % (k, _REGISTRY[k], v))
-        _REGISTRY[k] = v
-
-
-def get_registry():
-    return _REGISTRY
-
-
-def _write_raw_data_in_standard_format(raw_data, path, start_time):
-    """Writes the raw data to a file in (seconds since epoch, amps).
-
-    TODO(b/155294049): Deprecate this once Monsoon controller output
-        format is updated.
-
-    Args:
-        start_time: Measurement start time in seconds since epoch
-        raw_data: raw data as list or generator of (timestamp, sample)
-        path: path to write output
-    """
-    with open(path, 'w') as f:
-        for timestamp, amps in raw_data:
-            f.write('%s %s\n' %
-                    (timestamp + start_time, amps))
-
-
-class BasePowerMonitor(object):
-
-    def setup(self, **kwargs):
-        raise NotImplementedError()
-
-    def connect_usb(self, **kwargs):
-        raise NotImplementedError()
-
-    def measure(self, **kwargs):
-        raise NotImplementedError()
-
-    def release_resources(self, **kwargs):
-        raise NotImplementedError()
-
-    def disconnect_usb(self, **kwargs):
-        raise NotImplementedError()
-
-    def get_metrics(self, **kwargs):
-        raise NotImplementedError()
-
-    def get_waveform(self, **kwargs):
-        raise NotImplementedError()
-
-    def teardown(self, **kwargs):
-        raise NotImplementedError()
-
-
-class PowerMonitorMonsoonFacade(BasePowerMonitor):
-
-    def __init__(self, monsoon):
-        """Constructs a PowerMonitorFacade.
-
-        Args:
-            monsoon: delegate monsoon object, either
-                antlion.controllers.monsoon_lib.api.hvpm.monsoon.Monsoon or
-                antlion.controllers.monsoon_lib.api.lvpm_stock.monsoon.Monsoon.
-        """
-        self.monsoon = monsoon
-        self._log = logging.getLogger()
-
-    def setup(self, monsoon_config=None, **__):
-        """Set up the Monsoon controller for this testclass/testcase."""
-
-        if monsoon_config is None:
-            raise MonsoonError('monsoon_config can not be None')
-
-        self._log.info('Setting up Monsoon %s' % self.monsoon.serial)
-        voltage = monsoon_config.get_numeric('voltage', 4.2)
-        self.monsoon.set_voltage_safe(voltage)
-        if 'max_current' in monsoon_config:
-            self.monsoon.set_max_current(
-                monsoon_config.get_numeric('max_current'))
-
-    def power_cycle(self, monsoon_config=None, **__):
-        """Power cycles the delegated monsoon controller."""
-
-        if monsoon_config is None:
-            raise MonsoonError('monsoon_config can not be None')
-
-        self._log.info('Setting up Monsoon %s' % self.monsoon.serial)
-        voltage = monsoon_config.get_numeric('voltage', 4.2)
-        self._log.info('Setting up Monsoon voltage %s' % voltage)
-        self.monsoon.set_voltage_safe(0)
-        if 'max_current' in monsoon_config:
-            self.monsoon.set_max_current(
-                monsoon_config.get_numeric('max_current'))
-            self.monsoon.set_max_initial_current(
-                monsoon_config.get_numeric('max_current'))
-        self.connect_usb()
-        self.monsoon.set_voltage_safe(voltage)
-
-    def connect_usb(self, **__):
-        self.monsoon.usb('on')
-
-    def measure(self, measurement_args=None, start_time=None,
-                monsoon_output_path=None, **__):
-        if measurement_args is None:
-            raise MonsoonError('measurement_args can not be None')
-
-        with tempfile.NamedTemporaryFile(prefix='monsoon_') as tmon:
-            self.monsoon.measure_power(**measurement_args,
-                                       output_path=tmon.name)
-
-            if monsoon_output_path and start_time is not None:
-                _write_raw_data_in_standard_format(
-                    power_metrics.import_raw_data(tmon.name),
-                    monsoon_output_path, start_time)
-
-    def release_resources(self, **__):
-        # nothing to do
-        pass
-
-    def disconnect_usb(self, **__):
-        self.monsoon.usb('off')
-
-    def get_waveform(self, file_path=None):
-        """Parses a file to obtain all current (in amps) samples.
-
-        Args:
-            file_path: Path to a monsoon file.
-
-        Returns:
-            A list of tuples in which the first element is a timestamp and the
-            second element is the sampled current at that time.
-        """
-        if file_path is None:
-            raise MonsoonError('file_path can not be None')
-
-        return list(power_metrics.import_raw_data(file_path))
-
-    def get_metrics(self, start_time=None, voltage=None, monsoon_file_path=None,
-                    timestamps=None, **__):
-        """Parses a monsoon_file_path to compute the consumed power and other
-        power related metrics.
-
-        Args:
-            start_time: Time when the measurement started, this is used to
-                correlate timestamps from the device and from the power samples.
-            voltage: Voltage used when the measurement started. Used to compute
-                power from current.
-            monsoon_file_path: Path to a monsoon file.
-            timestamps: Named timestamps delimiting the segments of interest.
-            **__:
-
-        Returns:
-            A list of power_metrics.Metric.
-        """
-        if start_time is None:
-            raise MonsoonError('start_time can not be None')
-        if voltage is None:
-            raise MonsoonError('voltage can not be None')
-        if monsoon_file_path is None:
-            raise MonsoonError('monsoon_file_path can not be None')
-        if timestamps is None:
-            raise MonsoonError('timestamps can not be None')
-
-        return power_metrics.generate_test_metrics(
-            power_metrics.import_raw_data(monsoon_file_path),
-            timestamps=timestamps, voltage=voltage)
-
-    def teardown(self, **__):
-        # nothing to do
-        pass
diff --git a/src/antlion/controllers/relay_device_controller.py b/src/antlion/controllers/relay_device_controller.py
deleted file mode 100644
index 158e484..0000000
--- a/src/antlion/controllers/relay_device_controller.py
+++ /dev/null
@@ -1,102 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import json
-
-from antlion.controllers.relay_lib.relay_rig import RelayRig
-
-MOBLY_CONTROLLER_CONFIG_NAME = "RelayDevice"
-ACTS_CONTROLLER_REFERENCE_NAME = "relay_devices"
-
-
-def create(config):
-    """Creates RelayDevice controller objects.
-
-        Args:
-            config: Either one of two types:
-
-            A filename to a RelayController config (json file)
-            A RelayController config/dict composed of:
-                boards: A list of controller boards (see tests).
-                devices: A list of RelayDevices attached to the boards.
-
-        Returns:
-                A list of RelayDevice objects.
-    """
-    if type(config) is str:
-        return _create_from_external_config_file(config)
-    elif type(config) is dict:
-        return _create_from_dict(config)
-
-
-def _create_from_external_config_file(config_filename):
-    """Creates RelayDevice controller objects from an external config file.
-
-    Args:
-        config_filename: The filename of the RelayController config.
-
-    Returns:
-        A list of RelayDevice objects.
-    """
-    with open(config_filename) as json_file:
-        return _create_from_dict(json.load(json_file))
-
-
-def _create_from_dict(config):
-    """Creates RelayDevice controller objects from a dictionary.
-
-    Args:
-        config: The dictionary containing the RelayController config.
-
-    Returns:
-        A list of RelayDevice objects.
-    """
-    devices = list()
-
-    relay_rig = RelayRig(config)
-    for device in relay_rig.devices.values():
-        devices.append(device)
-
-    return devices
-
-
-def destroy(relay_devices):
-    """Cleans up RelayDevice objects.
-
-        Args:
-            relay_devices: A list of AndroidDevice objects.
-    """
-    for device in relay_devices:
-        device.clean_up()
-
-
-def get_info(relay_devices):
-    """Get information on a list of RelayDevice objects.
-
-    Args:
-        relay_devices: A list of RelayDevice objects.
-
-    Returns:
-        A list of dict, each representing info for an RelayDevice objects.
-    """
-    device_info = []
-    for device in relay_devices:
-        relay_ids = list()
-        for relay in device.relays:
-            relay_ids.append(relay)
-        info = {"name": device.name, "relays": relay_ids}
-        device_info.append(info)
-    return device_info
diff --git a/src/antlion/controllers/relay_lib/__init__.py b/src/antlion/controllers/relay_lib/__init__.py
deleted file mode 100644
index 7f1a899..0000000
--- a/src/antlion/controllers/relay_lib/__init__.py
+++ /dev/null
@@ -1,15 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
diff --git a/src/antlion/controllers/relay_lib/ak_xb10_speaker.py b/src/antlion/controllers/relay_lib/ak_xb10_speaker.py
deleted file mode 100644
index 465cf92..0000000
--- a/src/antlion/controllers/relay_lib/ak_xb10_speaker.py
+++ /dev/null
@@ -1,61 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import enum
-import time
-
-from antlion.controllers.relay_lib.devices.bluetooth_relay_device import BluetoothRelayDevice
-
-PAIRING_MODE_WAIT_TIME = 5
-POWER_ON_WAIT_TIME = 2
-POWER_OFF_WAIT_TIME = 6
-
-
-class Buttons(enum.Enum):
-    POWER = 'Power'
-    PAIR = 'Pair'
-
-
-class AkXB10Speaker(BluetoothRelayDevice):
-    """A&K XB10 Bluetooth Speaker model
-
-    Wraps the button presses, as well as the special features like pairing.
-    """
-
-    def __init__(self, config, relay_rig):
-        BluetoothRelayDevice.__init__(self, config, relay_rig)
-        self._ensure_config_contains_relays(button.value for button in Buttons)
-
-    def _hold_button(self, button, seconds):
-        self.hold_down(button.value)
-        time.sleep(seconds)
-        self.release(button.value)
-
-    def power_on(self):
-        self._hold_button(Buttons.POWER, POWER_ON_WAIT_TIME)
-
-    def power_off(self):
-        self._hold_button(Buttons.POWER, POWER_OFF_WAIT_TIME)
-
-    def enter_pairing_mode(self):
-        self._hold_button(Buttons.PAIR, PAIRING_MODE_WAIT_TIME)
-
-    def setup(self):
-        """Sets all relays to their default state (off)."""
-        BluetoothRelayDevice.setup(self)
-
-    def clean_up(self):
-        """Sets all relays to their default state (off)."""
-        BluetoothRelayDevice.clean_up(self)
diff --git a/src/antlion/controllers/relay_lib/devices/__init__.py b/src/antlion/controllers/relay_lib/devices/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/controllers/relay_lib/devices/__init__.py
+++ /dev/null
diff --git a/src/antlion/controllers/relay_lib/devices/bluetooth_relay_device.py b/src/antlion/controllers/relay_lib/devices/bluetooth_relay_device.py
deleted file mode 100644
index 369f72d..0000000
--- a/src/antlion/controllers/relay_lib/devices/bluetooth_relay_device.py
+++ /dev/null
@@ -1,36 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-from antlion.controllers.relay_lib.generic_relay_device import GenericRelayDevice
-from antlion.controllers.relay_lib.helpers import validate_key
-
-
-class BluetoothRelayDevice(GenericRelayDevice):
-    """A base class for bluetooth devices.
-
-    This base class is similar to GenericRelayDevice, but requires a mac_address
-    to be set from within the config taken in. This helps with type checking
-    for use of relays against bluetooth utils.
-    """
-    def __init__(self, config, relay_rig):
-        GenericRelayDevice.__init__(self, config, relay_rig)
-
-        self.mac_address = validate_key('mac_address', config, str,
-                                        self.__class__.__name__)
-
-    def get_mac_address(self):
-        """Returns the mac address of this device."""
-        return self.mac_address
-
diff --git a/src/antlion/controllers/relay_lib/dongles.py b/src/antlion/controllers/relay_lib/dongles.py
deleted file mode 100644
index 234a58b..0000000
--- a/src/antlion/controllers/relay_lib/dongles.py
+++ /dev/null
@@ -1,122 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import enum
-import time
-
-from antlion.controllers.relay_lib.devices.bluetooth_relay_device import BluetoothRelayDevice
-
-# Necessary timeout in between commands
-CMD_TIMEOUT = 1.2
-# Pairing mode activation wait time
-PAIRING_MODE_WAIT_TIME = 4.5
-SINGLE_ACTION_SHORT_WAIT_TIME = 0.6
-SINGLE_ACTION_LONG_WAIT_TIME = 2.0
-MISSING_RELAY_MSG = 'Relay config for Three button "%s" missing relay "%s".'
-
-
-class Buttons(enum.Enum):
-    ACTION = 'Action'
-    NEXT = 'Next'
-    PREVIOUS = 'Previous'
-
-
-class SingleButtonDongle(BluetoothRelayDevice):
-    """A Bluetooth dongle with one generic button Normally action.
-
-    Wraps the button presses, as well as the special features like pairing.
-    """
-
-    def __init__(self, config, relay_rig):
-        BluetoothRelayDevice.__init__(self, config, relay_rig)
-        self._ensure_config_contains_relay(Buttons.ACTION.value)
-
-    def enter_pairing_mode(self):
-        """Enters pairing mode. Blocks the thread until pairing mode is set.
-
-        Holds down the 'ACTION' buttons for PAIRING_MODE_WAIT_TIME seconds.
-        """
-        self.relays[Buttons.ACTION.value].set_nc_for(
-            seconds=PAIRING_MODE_WAIT_TIME)
-
-    def press_play_pause(self):
-        """Briefly presses the Action button."""
-        self.relays[Buttons.ACTION.value].set_nc_for(
-            seconds=SINGLE_ACTION_SHORT_WAIT_TIME)
-
-    def press_vr_mode(self):
-        """Long press the Action button."""
-        self.relays[Buttons.ACTION.value].set_nc_for(
-            seconds=SINGLE_ACTION_LONG_WAIT_TIME)
-
-    def setup(self):
-        """Sets all relays to their default state (off)."""
-        BluetoothRelayDevice.setup(self)
-
-    def clean_up(self):
-        """Sets all relays to their default state (off)."""
-        BluetoothRelayDevice.clean_up(self)
-
-
-class ThreeButtonDongle(BluetoothRelayDevice):
-    """A Bluetooth dongle with three generic buttons Normally action, next, and
-     previous.
-
-    Wraps the button presses, as well as the special features like pairing.
-    """
-
-    def __init__(self, config, relay_rig):
-        BluetoothRelayDevice.__init__(self, config, relay_rig)
-        self._ensure_config_contains_relays(button.value for button in Buttons)
-
-    def setup(self):
-        """Sets all relays to their default state (off)."""
-        BluetoothRelayDevice.setup(self)
-
-    def clean_up(self):
-        """Sets all relays to their default state (off)."""
-        BluetoothRelayDevice.clean_up(self)
-
-    def enter_pairing_mode(self):
-        """Enters pairing mode. Blocks the thread until pairing mode is set.
-
-        Holds down the 'ACTION' buttons for a little over 5 seconds.
-        """
-        self.relays[Buttons.ACTION.value].set_nc_for(
-            seconds=PAIRING_MODE_WAIT_TIME)
-
-    def press_play_pause(self):
-        """Briefly presses the Action button."""
-        self.relays[Buttons.ACTION.value].set_nc_for(
-            seconds=SINGLE_ACTION_SHORT_WAIT_TIME)
-        time.sleep(CMD_TIMEOUT)
-
-    def press_vr_mode(self):
-        """Long press the Action button."""
-        self.relays[Buttons.ACTION.value].set_nc_for(
-            seconds=SINGLE_ACTION_LONG_WAIT_TIME)
-        time.sleep(CMD_TIMEOUT)
-
-    def press_next(self):
-        """Briefly presses the Next button."""
-        self.relays[Buttons.NEXT.value].set_nc_for(
-            seconds=SINGLE_ACTION_SHORT_WAIT_TIME)
-        time.sleep(CMD_TIMEOUT)
-
-    def press_previous(self):
-        """Briefly presses the Previous button."""
-        self.relays[Buttons.PREVIOUS.value].set_nc_for(
-            seconds=SINGLE_ACTION_SHORT_WAIT_TIME)
-        time.sleep(CMD_TIMEOUT)
diff --git a/src/antlion/controllers/relay_lib/earstudio_receiver.py b/src/antlion/controllers/relay_lib/earstudio_receiver.py
deleted file mode 100644
index 50bf62d..0000000
--- a/src/antlion/controllers/relay_lib/earstudio_receiver.py
+++ /dev/null
@@ -1,148 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import enum
-import time
-
-from antlion.controllers.relay_lib.devices.bluetooth_relay_device import BluetoothRelayDevice
-
-SHORT_PRESS_WAIT_TIME = 0.5
-MEDIUM_PRESS_WAIT_TIME = 3.0
-LONG_PRESS_WAIT_TIME = 4.5
-WAIT_FOR_EFFECT_TIME = 1
-
-
-class Buttons(enum.Enum):
-    NEXT = 'Next'
-    PREVIOUS = "Previous"
-    PLAY_PAUSE = 'Play_pause'
-    VOLUME_UP = "Volume_up"
-    VOLUME_DOWN = "Volume_down"
-
-
-class EarstudioReceiver(BluetoothRelayDevice):
-
-    def __init__(self, config, relay_rig):
-        BluetoothRelayDevice.__init__(self, config, relay_rig)
-        self._ensure_config_contains_relays(button.value for button in Buttons)
-
-    def power_on(self):
-        """Power on the Earstudio device.
-
-        BLUE LED blinks once when power is on. "power-on sound" plays when it is
-        on. Automatically connects to a device that has been connected before.
-        GREEN LED blinks once every 3 seconds after the "connection sound."
-        Enters Discoverable Mode/Paring Mode when there is no device that has
-        been connected before. GREEN LED blinks twice every 0.5 seconds.
-        """
-        self.relays[Buttons.PLAY_PAUSE.value].set_nc_for(MEDIUM_PRESS_WAIT_TIME)
-        time.sleep(WAIT_FOR_EFFECT_TIME)
-
-    def power_off(self):
-        """Power off the Earstudio device.
-
-        RED LED blinks once right before power off. "power-off sound" plays when
-        it is off.
-        """
-        self.relays[Buttons.PLAY_PAUSE.value].set_nc_for(LONG_PRESS_WAIT_TIME)
-        time.sleep(WAIT_FOR_EFFECT_TIME)
-
-    def press_play_pause(self):
-        """Toggle audio play state.
-
-        GREEN LED slowly blinks once every 3 seconds during Bluetooth/USB
-        playback.
-        """
-        self.relays[Buttons.PLAY_PAUSE.value].set_nc_for(SHORT_PRESS_WAIT_TIME)
-        time.sleep(WAIT_FOR_EFFECT_TIME)
-
-    def press_accept_call(self):
-        """Receive incoming call.
-
-        BLUE LED slowly blinks once every 3 seconds
-        "Call-receiving sound" when received.
-        """
-        self.relays[Buttons.PLAY_PAUSE.value].set_nc_for(SHORT_PRESS_WAIT_TIME)
-        time.sleep(WAIT_FOR_EFFECT_TIME)
-
-    def press_reject_call(self):
-        """Reject incoming call.
-
-        "Call-rejection sound" when refused.
-        """
-        self.relays[Buttons.PLAY_PAUSE.value].set_nc_for(MEDIUM_PRESS_WAIT_TIME)
-        time.sleep(WAIT_FOR_EFFECT_TIME)
-
-    def press_end_call(self):
-        """End ongoing call.
-
-        "Call-end sound" when ended.
-        """
-        self.relays[Buttons.PLAY_PAUSE.value].set_nc_for(SHORT_PRESS_WAIT_TIME)
-        time.sleep(WAIT_FOR_EFFECT_TIME)
-
-    def press_next(self):
-        """Skip to the next track."""
-        self.relays[Buttons.NEXT.value].set_nc_for(SHORT_PRESS_WAIT_TIME)
-        time.sleep(WAIT_FOR_EFFECT_TIME)
-
-    def toggle_ambient_mode(self):
-        """Turn ambient mode on/off.
-
-        Only available during playback.
-        To use it, you must set 'Ambient Shortcut Key' to 'on' in the EarStudio
-        app.
-        """
-        self.relays[Buttons.NEXT.value].set_nc_for(MEDIUM_PRESS_WAIT_TIME)
-        time.sleep(WAIT_FOR_EFFECT_TIME)
-
-    def press_previous(self):
-        """Rewind to beginning of current or previous track."""
-        self.relays[Buttons.PREVIOUS.value].set_nc_for(SHORT_PRESS_WAIT_TIME)
-        time.sleep(WAIT_FOR_EFFECT_TIME)
-
-    def enter_pairing_mode(self):
-        """Enter BlueTooth pairing mode.
-
-        GREEN LED blinks twice every 0.5 seconds after "enter paring-mode
-        sound." Disconnects from the current connected device when entering
-        this mode.
-        """
-        self.relays[Buttons.PREVIOUS.value].set_nc_for(MEDIUM_PRESS_WAIT_TIME)
-        time.sleep(WAIT_FOR_EFFECT_TIME)
-
-    def press_volume_up(self, press_duration=SHORT_PRESS_WAIT_TIME):
-        """Turn up the volume.
-
-        Volume increases by 0.5dB for each press.
-        Press&holding the button increases the volume consistently up to 6dB.
-        Args:
-          press_duration (int|float): how long to hold button for.
-        """
-        self.relays[Buttons.VOLUME_UP.value].set_nc_for(press_duration)
-        time.sleep(WAIT_FOR_EFFECT_TIME)
-
-    def press_volume_down(self, press_duration=SHORT_PRESS_WAIT_TIME):
-        """Turn down the volume.
-
-        Volume decreases by 0.5dB for each press.
-        Press&hold the button decreases the volume consistently down to -60dB.
-        Pressing the button at the minimum volume turns to a mute level.
-        Args:
-          press_duration (int|float): how long to hold button for.
-        """
-        self.relays[Buttons.VOLUME_DOWN.value].set_nc_for(press_duration)
-        time.sleep(WAIT_FOR_EFFECT_TIME)
diff --git a/src/antlion/controllers/relay_lib/errors.py b/src/antlion/controllers/relay_lib/errors.py
deleted file mode 100644
index 5af5d60..0000000
--- a/src/antlion/controllers/relay_lib/errors.py
+++ /dev/null
@@ -1,24 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-from antlion import signals
-
-
-class RelayConfigError(signals.ControllerError):
-    """An error found within the RelayRig config file."""
-
-
-class RelayDeviceConnectionError(signals.ControllerError):
-    """An error for being unable to connect to the device."""
diff --git a/src/antlion/controllers/relay_lib/fugu_remote.py b/src/antlion/controllers/relay_lib/fugu_remote.py
deleted file mode 100644
index db706c0..0000000
--- a/src/antlion/controllers/relay_lib/fugu_remote.py
+++ /dev/null
@@ -1,78 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import time
-import enum
-
-from antlion.controllers.relay_lib.relay import SynchronizeRelays
-from antlion.controllers.relay_lib.devices.bluetooth_relay_device import BluetoothRelayDevice
-
-PAIRING_MODE_WAIT_TIME = 5.2
-
-
-class Buttons(enum.Enum):
-    HOME = 'Home'
-    BACK = 'Back'
-    PLAY_PAUSE = 'Play'
-
-
-class FuguRemote(BluetoothRelayDevice):
-    """A Nexus Player (Fugu) Remote.
-
-    Wraps the button presses, as well as the special features like pairing.
-    """
-
-    def __init__(self, config, relay_rig):
-        BluetoothRelayDevice.__init__(self, config, relay_rig)
-        self._ensure_config_contains_relays(button.value for button in Buttons)
-
-    def setup(self):
-        """Sets all relays to their default state (off)."""
-        BluetoothRelayDevice.setup(self)
-        # If the Fugu remote does have a power relay attached, turn it on.
-        power = 'Power'
-        if power in self.relays:
-            self.relays[power].set_nc()
-
-    def clean_up(self):
-        """Sets all relays to their default state (off)."""
-        BluetoothRelayDevice.clean_up(self)
-
-    def enter_pairing_mode(self):
-        """Enters pairing mode. Blocks the thread until pairing mode is set.
-
-        Holds down the 'Home' and 'Back' buttons for a little over 5 seconds.
-        """
-        with SynchronizeRelays():
-            self.hold_down(Buttons.HOME.value)
-            self.hold_down(Buttons.BACK.value)
-
-        time.sleep(PAIRING_MODE_WAIT_TIME)
-
-        with SynchronizeRelays():
-            self.release(Buttons.HOME.value)
-            self.release(Buttons.BACK.value)
-
-    def press_play_pause(self):
-        """Briefly presses the Play/Pause button."""
-        self.press(Buttons.PLAY_PAUSE.value)
-
-    def press_home(self):
-        """Briefly presses the Home button."""
-        self.press(Buttons.HOME.value)
-
-    def press_back(self):
-        """Briefly presses the Back button."""
-        self.press(Buttons.BACK.value)
diff --git a/src/antlion/controllers/relay_lib/generic_relay_device.py b/src/antlion/controllers/relay_lib/generic_relay_device.py
deleted file mode 100644
index cf93400..0000000
--- a/src/antlion/controllers/relay_lib/generic_relay_device.py
+++ /dev/null
@@ -1,88 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.controllers.relay_lib.errors import RelayConfigError
-from antlion.controllers.relay_lib.relay import SynchronizeRelays
-from antlion.controllers.relay_lib.relay_device import RelayDevice
-
-MISSING_RELAY_MSG = 'Relay config for %s device "%s" missing relay "%s".'
-
-
-class GenericRelayDevice(RelayDevice):
-    """A default, all-encompassing implementation of RelayDevice.
-
-    This class allows for quick access to getting relay switches through the
-    subscript ([]) operator. Note that it does not allow for re-assignment or
-    additions to the relays dictionary.
-    """
-
-    def __init__(self, config, relay_rig):
-        RelayDevice.__init__(self, config, relay_rig)
-
-    def _ensure_config_contains_relays(self, relay_names):
-        for relay_name in relay_names:
-            self._ensure_config_contains_relay(relay_name)
-
-    def _ensure_config_contains_relay(self, relay_name):
-        """Throws an error if the relay does not exist."""
-        if relay_name not in self.relays:
-            raise RelayConfigError(MISSING_RELAY_MSG % (self.__class__.__name__,
-                                                        self.name, relay_name))
-
-    def get_button_names(self):
-        """Returns the list of all button names."""
-        return list(self.relays.keys())
-
-    def setup(self):
-        """Sets all relays to their default state (off)."""
-        with SynchronizeRelays():
-            for relay in self.relays.values():
-                relay.set_no()
-
-    def clean_up(self):
-        """Sets all relays to their default state (off)."""
-        with SynchronizeRelays():
-            for relay in self.relays.values():
-                if relay.is_dirty():
-                    relay.set_no()
-
-    def press(self, button_name):
-        """Presses the button for a short period of time.
-
-        Args:
-            button_name: the name of the button to press.
-        """
-        self.relays[button_name].set_nc_for()
-
-    def hold_down(self, button_name):
-        """Holds down the button until release is called.
-
-        If the button is already being held, the state does not change.
-
-        Args:
-            button_name: the name of the button to hold down.
-        """
-        self.relays[button_name].set_nc()
-
-    def release(self, button_name):
-        """Releases the held down button with name 'button_name'.
-
-        If the button is already depressed, the state does not change.
-
-        Args:
-            button_name: the name of the button to release.
-        """
-        self.relays[button_name].set_no()
diff --git a/src/antlion/controllers/relay_lib/headset.py b/src/antlion/controllers/relay_lib/headset.py
deleted file mode 100644
index 119b4f6..0000000
--- a/src/antlion/controllers/relay_lib/headset.py
+++ /dev/null
@@ -1,47 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import time
-import enum
-
-from antlion.controllers.relay_lib.devices.bluetooth_relay_device import BluetoothRelayDevice
-
-PAIRING_MODE_WAIT_TIME = 9
-POWER_TOGGLE_WAIT_TIME = 2
-
-
-class Buttons(enum.Enum):
-    POWER = 'Power'
-
-
-class Headset(BluetoothRelayDevice):
-    """Headset with same Power and Pair Button.
-
-    Wraps the button presses, as well as the special features like pairing.
-    """
-    def __init__(self, config, relay_rig):
-        BluetoothRelayDevice.__init__(self, config, relay_rig)
-        self._ensure_config_contains_relays(button.value for button in Buttons)
-
-    def _hold_button(self, button, seconds):
-        self.hold_down(button.value)
-        time.sleep(seconds)
-        self.release(button.value)
-
-    def power_off(self):
-        self._hold_button(Buttons.POWER, POWER_TOGGLE_WAIT_TIME)
-
-    def turn_power_on_and_enter_pairing_mode(self):
-        self._hold_button(Buttons.POWER, PAIRING_MODE_WAIT_TIME)
diff --git a/src/antlion/controllers/relay_lib/helpers.py b/src/antlion/controllers/relay_lib/helpers.py
deleted file mode 100644
index 463946c..0000000
--- a/src/antlion/controllers/relay_lib/helpers.py
+++ /dev/null
@@ -1,49 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-from antlion.controllers.relay_lib.errors import RelayConfigError
-from six import string_types
-
-MISSING_KEY_ERR_MSG = 'key "%s" missing from %s. Offending object:\n %s'
-TYPE_MISMATCH_ERR_MSG = 'Key "%s" is of type %s. Expecting %s.' \
-                        ' Offending object:\n %s'
-
-
-def validate_key(key, dictionary, expected_type, source):
-    """Validates if a key exists and its value is the correct type.
-    Args:
-        key: The key in dictionary.
-        dictionary: The dictionary that should contain key.
-        expected_type: the type that key's value should have.
-        source: The name of the object being checked. Used for error messages.
-
-    Returns:
-        The value of dictionary[key] if no error was raised.
-
-    Raises:
-        RelayConfigError if the key does not exist, or is not of expected_type.
-    """
-    if key not in dictionary:
-        raise RelayConfigError(MISSING_KEY_ERR_MSG % (key, source, dictionary))
-    if expected_type == str:
-        if not isinstance(dictionary[key], string_types):
-            raise RelayConfigError(TYPE_MISMATCH_ERR_MSG %
-                                   (key, dictionary[key], expected_type,
-                                    dictionary))
-    elif not isinstance(dictionary[key], expected_type):
-        raise RelayConfigError(TYPE_MISMATCH_ERR_MSG %
-                               (key, dictionary[key], expected_type,
-                                dictionary))
-    return dictionary[key]
diff --git a/src/antlion/controllers/relay_lib/i6s_headset.py b/src/antlion/controllers/relay_lib/i6s_headset.py
deleted file mode 100644
index 7de5eba..0000000
--- a/src/antlion/controllers/relay_lib/i6s_headset.py
+++ /dev/null
@@ -1,92 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import enum
-
-from antlion.controllers.relay_lib.errors import RelayConfigError
-from antlion.controllers.relay_lib.devices.bluetooth_relay_device import BluetoothRelayDevice
-
-PAIRING_MODE_WAIT_TIME = 3
-WAIT_TIME = 0.1
-MISSING_RELAY_MSG = 'Relay config for i6s Headset "%s" missing relay "%s".'
-
-
-class Buttons(enum.Enum):
-    POWER = "Power"
-    NEXT = 'Next'
-    PREVIOUS = "Previous"
-    PLAY_PAUSE = 'Play_pause'
-    PAIR = "Pair"
-    VOLUME_UP = "Volume_up"
-    VOLUME_DOWN = "Volume_down"
-
-
-class I6sHeadset(BluetoothRelayDevice):
-
-    def __init__(self, config, relay_rig):
-        BluetoothRelayDevice.__init__(self, config, relay_rig)
-        self._ensure_config_contains_relays(button.value for button in Buttons)
-
-    def setup(self):
-        """Sets all relays to their default state (off)."""
-        BluetoothRelayDevice.setup(self)
-
-    def clean_up(self):
-        """Turns off headset."""
-        self.relays[Buttons.PAIR.value].set_no_for(PAIRING_MODE_WAIT_TIME)
-
-    def enter_pairing_mode(self):
-        """Sets relay in paring mode."""
-        self.relays[Buttons.PAIR.value].set_no_for(PAIRING_MODE_WAIT_TIME)
-
-    def power_on(self):
-        """Power on relay."""
-        self.relays[Buttons.POWER.value].set_no_for(WAIT_TIME)
-
-    def press_play_pause(self):
-        """
-        Sets relay to
-            Play state : if there is no A2DP_streaming.
-            Pause state : if there is A2DP_streaming.
-        """
-        self.relays[Buttons.PLAY_PAUSE.value].set_no_for(WAIT_TIME)
-
-    def press_next(self):
-        """Skips to next song from relay_device."""
-        self.relays[Buttons.NEXT.value].set_no_for(WAIT_TIME)
-
-    def press_previous(self):
-        """Skips to previous song from relay_device."""
-        self.relays[Buttons.PREVIOUS.value].set_no_for(WAIT_TIME)
-
-    def press_volume_up(self):
-        """Increases volume from relay_device."""
-        self.relays[Buttons.VOLUME_UP.value].set_no_for(WAIT_TIME)
-
-    def press_volume_down(self):
-        """Decreases volume from relay_device."""
-        self.relays[Buttons.VOLUME_DOWN.value].set_no_for(WAIT_TIME)
-
-    def press_initiate_call(self):
-        """Initiate call from relay device."""
-        for i in range(0, 2):
-            self.relays[Buttons.POWER.value].set_no_for(WAIT_TIME)
-        return True
-
-    def press_accept_call(self):
-        """Accepts call from relay device."""
-        self.relays[Buttons.POWER.value].set_no_for(WAIT_TIME)
-        return True
diff --git a/src/antlion/controllers/relay_lib/jaybird_x3.py b/src/antlion/controllers/relay_lib/jaybird_x3.py
deleted file mode 100644
index 991267a..0000000
--- a/src/antlion/controllers/relay_lib/jaybird_x3.py
+++ /dev/null
@@ -1,115 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import enum
-import time
-
-from antlion.controllers.relay_lib.devices.bluetooth_relay_device import BluetoothRelayDevice
-
-SHORT_PRESS_WAIT_TIME = 0.5
-MED_PRESS_WAIT_TIME = 1.5
-POWER_ON_WAIT_TIME = 2.5
-LONG_PRESS_WAIT_TIME = 4.5
-
-WAIT_FOR_EFFECT_TIME = 2.5
-
-
-class Buttons(enum.Enum):
-    VOLUME_UP = "Volume_up"
-    VOLUME_DOWN = "Volume_down"
-    POWER = "Power"
-
-
-class JaybirdX3Earbuds(BluetoothRelayDevice):
-    """Jaybird X3 earbuds model
-
-    A relay device class for Jaybird X3 earbuds that provides basic Bluetooth
-    """
-    def __init__(self, config, relay_rig):
-        BluetoothRelayDevice.__init__(self, config, relay_rig)
-        self._ensure_config_contains_relays(button.value for button in Buttons)
-
-    def power_off(self):
-        """If the device powers off, the LED will flash red before it
-        powers off. A voice prompt will say "POWER_OFF".
-        """
-        self.relays[Buttons.POWER.value].set_nc_for(LONG_PRESS_WAIT_TIME)
-        time.sleep(WAIT_FOR_EFFECT_TIME)
-
-    def power_on(self):
-        """If the device powers on, the LED will flash green.
-        A voice prompt will say "POWER ON".
-        """
-        self.relays[Buttons.POWER.value].set_nc_for(POWER_ON_WAIT_TIME)
-        time.sleep(WAIT_FOR_EFFECT_TIME)
-
-    def enter_pairing_mode(self):
-        """The Jaybird can only enter pairing mode from an OFF state.
-        """
-        self.power_on()
-        self.power_off()
-        self.relays[Buttons.POWER.value].set_nc_for(LONG_PRESS_WAIT_TIME)
-        time.sleep(WAIT_FOR_EFFECT_TIME)
-
-    def press_play_pause(self):
-        """Toggles the audio play state."""
-        self.relays[Buttons.POWER.value].set_nc_for(SHORT_PRESS_WAIT_TIME)
-        time.sleep(WAIT_FOR_EFFECT_TIME)
-
-    def activate_voice_commands(self):
-        """Activates voice commands during music streaming."""
-        self.relays[Buttons.POWER.value].set_nc_for(MED_PRESS_WAIT_TIME)
-        time.sleep(WAIT_FOR_EFFECT_TIME)
-
-    def press_accept_call(self):
-        """Receives an incoming call."""
-        self.relays[Buttons.POWER.value].set_nc_for(SHORT_PRESS_WAIT_TIME)
-        time.sleep(WAIT_FOR_EFFECT_TIME)
-
-    def press_reject_call(self):
-        """Rejects an incoming call."""
-        self.relays[Buttons.POWER.value].set_nc_for(MED_PRESS_WAIT_TIME)
-        time.sleep(WAIT_FOR_EFFECT_TIME)
-
-    def press_next(self):
-        """Skips to the next track."""
-        self.relays[Buttons.VOLUME_UP.value].set_nc_for(MED_PRESS_WAIT_TIME)
-        time.sleep(WAIT_FOR_EFFECT_TIME)
-
-    def press_previous(self):
-        """Rewinds to beginning of current or previous track."""
-        self.relays[Buttons.VOLUME_DOWN.value].set_nc_for(MED_PRESS_WAIT_TIME)
-        time.sleep(WAIT_FOR_EFFECT_TIME)
-
-    def press_volume_up(self):
-        """Turns up the volume."""
-        self.relays[Buttons.VOLUME_UP.value].set_nc_for(SHORT_PRESS_WAIT_TIME)
-        time.sleep(WAIT_FOR_EFFECT_TIME)
-
-    def press_volume_down(self):
-        """Turns down the volume."""
-        self.relays[Buttons.VOLUME_DOWN.value].set_nc_for(SHORT_PRESS_WAIT_TIME)
-        time.sleep(WAIT_FOR_EFFECT_TIME)
-
-    def toggle_hands_free(self):
-        """Switches call audio between the phone and X3 buds."""
-        self.relays[Buttons.VOLUME_UP.value].set_nc_for(MED_PRESS_WAIT_TIME)
-        time.sleep(WAIT_FOR_EFFECT_TIME)
-
-    def mute_phone_call(self):
-        """Mutes phone call audio."""
-        self.relays[Buttons.VOLUME_DOWN.value].set_nc_for(MED_PRESS_WAIT_TIME)
-        time.sleep(WAIT_FOR_EFFECT_TIME)
diff --git a/src/antlion/controllers/relay_lib/logitech_headset.py b/src/antlion/controllers/relay_lib/logitech_headset.py
deleted file mode 100644
index 5c95bac..0000000
--- a/src/antlion/controllers/relay_lib/logitech_headset.py
+++ /dev/null
@@ -1,52 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Device Details:
-https://www.logitech.com/en-in/product/bluetooth-audio-adapter#specification-tabular
-"""
-import enum
-
-from antlion.controllers.relay_lib.devices.bluetooth_relay_device import BluetoothRelayDevice
-
-PAIRING_MODE_WAIT_TIME = 5
-WAIT_TIME = 0.1
-
-
-class Buttons(enum.Enum):
-    POWER = 'Power'
-    PAIR = 'Pair'
-
-
-class LogitechAudioReceiver(BluetoothRelayDevice):
-    def __init__(self, config, relay_rig):
-        BluetoothRelayDevice.__init__(self, config, relay_rig)
-        self._ensure_config_contains_relays(button.value for button in Buttons)
-
-    def setup(self):
-        """Sets all relays to their default state (off)."""
-        BluetoothRelayDevice.setup(self)
-
-    def clean_up(self):
-        """Sets all relays to their default state (off)."""
-        BluetoothRelayDevice.clean_up(self)
-
-    def power_on(self):
-        """Power on relay."""
-        self.relays[Buttons.POWER.value].set_nc()
-
-    def enter_pairing_mode(self):
-        """Sets relay in paring mode."""
-        self.relays[Buttons.PAIR.value].set_nc()
diff --git a/src/antlion/controllers/relay_lib/power_supply.py b/src/antlion/controllers/relay_lib/power_supply.py
deleted file mode 100644
index f1c6213..0000000
--- a/src/antlion/controllers/relay_lib/power_supply.py
+++ /dev/null
@@ -1,50 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import time
-import enum
-
-from antlion.controllers.relay_lib.devices.bluetooth_relay_device import BluetoothRelayDevice
-
-POWER_TOGGLE_WAIT_TIME = 0.5
-
-class Buttons(enum.Enum):
-    POWER = 'Power'
-
-
-class PowerSupply(BluetoothRelayDevice):
-    """Power Supply for Headset.
-
-    Wraps the button presses.
-    """
-    def __init__(self, config, relay_rig):
-        BluetoothRelayDevice.__init__(self, config, relay_rig)
-        self._ensure_config_contains_relays(button.value for button in Buttons)
-
-    def _hold_button(self, button, seconds):
-        self.hold_down(button.value)
-        time.sleep(seconds)
-        self.release(button.value)
-
-    def power_off(self):
-        self._hold_button(Buttons.POWER, POWER_TOGGLE_WAIT_TIME)
-
-    def setup(self):
-        """Do nothing, since this is not a headset"""
-        return True
-
-    def clean_up(self):
-        """Do nothing, since this is not a headset"""
-        return True
diff --git a/src/antlion/controllers/relay_lib/rdl_relay_board.py b/src/antlion/controllers/relay_lib/rdl_relay_board.py
deleted file mode 100644
index e4d5c37..0000000
--- a/src/antlion/controllers/relay_lib/rdl_relay_board.py
+++ /dev/null
@@ -1,35 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.controllers.relay_lib.relay import RelayState
-from antlion.controllers.relay_lib.usb_relay_board_base import UsbRelayBoardBase
-from pylibftdi import BitBangDevice
-
-
-class RdlRelayBoard(UsbRelayBoardBase):
-    def set(self, relay_position, value):
-        """Returns the current status of the passed in relay.
-
-        Args:
-            relay_position: Relay position.
-            value: Turn_on or Turn_off the relay for the given relay_position.
-        """
-        with BitBangDevice(self.device) as bb:
-            if value == RelayState.NO:
-                bb.port |= self.address[relay_position]
-            else:
-                bb.port &= ~(self.address[relay_position])
-        self.status_dict[relay_position] = value
diff --git a/src/antlion/controllers/relay_lib/relay.py b/src/antlion/controllers/relay_lib/relay.py
deleted file mode 100644
index fbac1de..0000000
--- a/src/antlion/controllers/relay_lib/relay.py
+++ /dev/null
@@ -1,207 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from enum import Enum
-from time import sleep
-
-from antlion.controllers.relay_lib.errors import RelayConfigError
-
-
-class RelayState(Enum):
-    """Enum for possible Relay States."""
-    # Pretend this means 'OFF'
-    NO = 'NORMALLY_OPEN'
-    # Pretend this means 'ON'
-    NC = 'NORMALLY_CLOSED'
-
-
-class SynchronizeRelays:
-    """A class that allows for relays to change state nearly simultaneously.
-
-    Can be used with the 'with' statement in Python:
-
-    with SynchronizeRelays():
-        relay1.set_no()
-        relay2.set_nc()
-
-    Note that the thread will still wait for RELAY_TRANSITION_WAIT_TIME
-    after execution leaves the 'with' statement.
-    """
-    _sync_sleep_flag = False
-
-    def __enter__(self):
-        self.prev_toggle_time = Relay.transition_wait_time
-        self.prev_sync_flag = SynchronizeRelays._sync_sleep_flag
-        Relay.transition_wait_time = 0
-        SynchronizeRelays._sync_sleep_flag = False
-
-    def __exit__(self, type, value, traceback):
-        if SynchronizeRelays._sync_sleep_flag:
-            sleep(Relay.transition_wait_time)
-
-        Relay.transition_wait_time = self.prev_toggle_time
-        SynchronizeRelays._sync_sleep_flag = self.prev_sync_flag
-
-
-class Relay(object):
-    """A class representing a single relay switch on a RelayBoard.
-
-    References to these relays are stored in both the RelayBoard and the
-    RelayDevice classes under the variable "relays". GenericRelayDevice can also
-    access these relays through the subscript ([]) operator.
-
-    At the moment, relays only have a valid state of 'ON' or 'OFF'. This may be
-    extended in a subclass if needed. Keep in mind that if this is done, changes
-    will also need to be made in the RelayRigParser class to initialize the
-    relays.
-
-    """
-    """How long to wait for relays to transition state."""
-    transition_wait_time = .2
-    button_press_time = .25
-
-    def __init__(self, relay_board, position):
-        self.relay_board = relay_board
-        self.position = position
-        self._original_state = None
-        self.relay_id = "%s/%s" % (self.relay_board.name, self.position)
-
-    def set_no(self):
-        """Sets the relay to the 'NO' state. Shorthand for set(RelayState.NO).
-
-        Blocks the thread for Relay.transition_wait_time.
-        """
-        self.set(RelayState.NO)
-
-    def set_nc(self):
-        """Sets the relay to the 'NC' state. Shorthand for set(RelayState.NC).
-
-        Blocks the thread for Relay.transition_wait_time.
-
-        """
-        self.set(RelayState.NC)
-
-    def toggle(self):
-        """Swaps the state from 'NO' to 'NC' or 'NC' to 'NO'.
-        Blocks the thread for Relay.transition_wait_time.
-        """
-        if self.get_status() == RelayState.NO:
-            self.set(RelayState.NC)
-        else:
-            self.set(RelayState.NO)
-
-    def set(self, state):
-        """Sets the relay to the 'NO' or 'NC' state.
-
-        Blocks the thread for Relay.transition_wait_time.
-
-        Args:
-            state: either 'NO' or 'NC'.
-
-        Raises:
-            ValueError if state is not 'NO' or 'NC'.
-
-        """
-        if self._original_state is None:
-            self._original_state = self.relay_board.get_relay_status(
-                self.position)
-
-        if state is not RelayState.NO and state is not RelayState.NC:
-            raise ValueError(
-                'Invalid state. Received "%s". Expected any of %s.' %
-                (state, [state for state in RelayState]))
-        if self.get_status() != state:
-            self.relay_board.set(self.position, state)
-            SynchronizeRelays._sync_sleep_flag = True
-            sleep(Relay.transition_wait_time)
-
-    def set_no_for(self, seconds=button_press_time):
-        """Sets the relay to 'NORMALLY_OPEN' for seconds. Blocks the thread.
-
-        Args:
-            seconds: The number of seconds to sleep for.
-        """
-        self.set_no()
-        sleep(seconds)
-        self.set_nc()
-
-    def set_nc_for(self, seconds=button_press_time):
-        """Sets the relay to 'NORMALLY_CLOSED' for seconds. Blocks the thread.
-
-        Respects Relay.transition_wait_time for toggling state.
-
-        Args:
-            seconds: The number of seconds to sleep for.
-        """
-        self.set_nc()
-        sleep(seconds)
-        self.set_no()
-
-    def get_status(self):
-        return self.relay_board.get_relay_status(self.position)
-
-    def clean_up(self):
-        """Does any clean up needed to allow the next series of tests to run.
-
-        For now, all this does is switches to its previous state. Inheriting
-        from this class and overriding this method would be the best course of
-        action to allow a more complex clean up to occur. If you do this, be
-        sure to make the necessary modifications in RelayRig.initialize_relay
-        and RelayRigParser.parse_json_relays.
-        """
-        if self._original_state is not None:
-            self.set(self._original_state)
-
-    def is_dirty(self):
-        return self._original_state is not None
-
-
-class RelayDict(object):
-    """A wrapped dictionary that gives config errors upon failure.
-
-    Has the same interface as a dictionary, but when getting the key fails, the
-    dictionary returns a RelayConfigError, letting the user know that the reason
-    the dict failed to return a relay is because the relay was not found in the
-    config.
-
-    Also prevents modification of elements, because changing the relays here
-    does not change what they are in hardware.
-    """
-    ERROR_MESSAGE = ('Error: Attempted to get relay "%s" in %s "%s" but the '
-                     'relay does not exist.\nExisting relays are: %s.\nMake '
-                     'sure the missing relay is added to the config file, and '
-                     'is properly setup.')
-
-    def __init__(self, relay_device, input_dict):
-        self.relay_device = relay_device
-        self._store = input_dict
-
-    def __getitem__(self, key):
-        try:
-            return self._store[key]
-        except KeyError:
-            raise RelayConfigError(self.ERROR_MESSAGE %
-                                   (key, type(self.relay_device),
-                                    self.relay_device.name, self._store))
-
-    def __iter__(self):
-        return iter(self._store)
-
-    def __len__(self):
-        return len(self._store)
-
-    def __repr__(self):
-        return repr(self._store)
diff --git a/src/antlion/controllers/relay_lib/relay_board.py b/src/antlion/controllers/relay_lib/relay_board.py
deleted file mode 100644
index 464326d..0000000
--- a/src/antlion/controllers/relay_lib/relay_board.py
+++ /dev/null
@@ -1,58 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.controllers.relay_lib.errors import RelayConfigError
-from antlion.controllers.relay_lib.helpers import validate_key
-from antlion.controllers.relay_lib.relay import Relay
-
-
-class RelayBoard(object):
-    """Handles interfacing with the Relays and RelayDevices.
-
-    This is the base class for all RelayBoards.
-    """
-
-    def __init__(self, config):
-        """Creates a RelayBoard instance. Handles naming and relay creation.
-
-        Args:
-            config: A configuration dictionary, usually pulled from an element
-            under in "boards" list in the relay rig config file.
-        """
-        self.name = validate_key('name', config, str, 'config')
-        if '/' in self.name:
-            raise RelayConfigError('RelayBoard name cannot contain a "/".')
-        self.relays = dict()
-        for pos in self.get_relay_position_list():
-            self.relays[pos] = Relay(self, pos)
-
-    def set(self, relay_position, state):
-        """Sets the relay to the given state.
-
-        Args:
-            relay_position: the relay having its state modified.
-            state: the state to set the relay to. Currently only states NO and
-                   NC are supported.
-        """
-        raise NotImplementedError()
-
-    def get_relay_position_list(self):
-        """Returns a list of all possible relay positions."""
-        raise NotImplementedError()
-
-    def get_relay_status(self, relay):
-        """Returns the state of the given relay."""
-        raise NotImplementedError()
diff --git a/src/antlion/controllers/relay_lib/relay_device.py b/src/antlion/controllers/relay_lib/relay_device.py
deleted file mode 100644
index 06bf42f..0000000
--- a/src/antlion/controllers/relay_lib/relay_device.py
+++ /dev/null
@@ -1,59 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.controllers.relay_lib.errors import RelayConfigError
-from antlion.controllers.relay_lib.helpers import validate_key
-
-
-class RelayDevice(object):
-    """The base class for all relay devices.
-
-    RelayDevice has access to both its relays as well as the relay rig it is
-    a part of. Note that you can receive references to the relay_boards
-    through relays[0...n].board. The relays are not guaranteed to be on
-    the same relay board.
-    """
-
-    def __init__(self, config, relay_rig):
-        """Creates a RelayDevice.
-
-        Args:
-            config: The dictionary found in the config file for this device.
-            You can add your own params to the config file if needed, and they
-            will be found in this dictionary.
-            relay_rig: The RelayRig the device is attached to. This won't be
-            useful for classes that inherit from RelayDevice, so just pass it
-            down to this __init__.
-        """
-        self.rig = relay_rig
-        self.relays = dict()
-
-        validate_key('name', config, str, '"devices" element')
-        self.name = config['name']
-
-        relays = validate_key('relays', config, dict, '"devices" list element')
-        if len(relays) < 1:
-            raise RelayConfigError(
-                'Key "relays" must have at least 1 element.')
-
-        for name, relay_id in relays.items():
-            self.relays[name] = relay_rig.relays[relay_id]
-
-    def setup(self):
-        """Sets up the relay device to be ready for commands."""
-
-    def clean_up(self):
-        """Sets the relay device back to its inert state."""
diff --git a/src/antlion/controllers/relay_lib/relay_rig.py b/src/antlion/controllers/relay_lib/relay_rig.py
deleted file mode 100644
index 835dd66..0000000
--- a/src/antlion/controllers/relay_lib/relay_rig.py
+++ /dev/null
@@ -1,177 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import collections
-
-from antlion.controllers.relay_lib.ak_xb10_speaker import AkXB10Speaker
-from antlion.controllers.relay_lib.dongles import SingleButtonDongle
-from antlion.controllers.relay_lib.dongles import ThreeButtonDongle
-from antlion.controllers.relay_lib.earstudio_receiver import EarstudioReceiver
-from antlion.controllers.relay_lib.errors import RelayConfigError
-from antlion.controllers.relay_lib.fugu_remote import FuguRemote
-from antlion.controllers.relay_lib.generic_relay_device import GenericRelayDevice
-from antlion.controllers.relay_lib.headset import Headset
-from antlion.controllers.relay_lib.helpers import validate_key
-from antlion.controllers.relay_lib.i6s_headset import I6sHeadset
-from antlion.controllers.relay_lib.jaybird_x3 import JaybirdX3Earbuds
-from antlion.controllers.relay_lib.logitech_headset import LogitechAudioReceiver
-from antlion.controllers.relay_lib.power_supply import PowerSupply
-from antlion.controllers.relay_lib.rdl_relay_board import RdlRelayBoard
-from antlion.controllers.relay_lib.sain_smart_board import SainSmartBoard
-from antlion.controllers.relay_lib.sain_smart_8_channel_usb_relay_board import SainSmart8ChannelUsbRelayBoard
-from antlion.controllers.relay_lib.skullcandy import Skullcandy
-from antlion.controllers.relay_lib.sony_xb2_speaker import SonyXB2Speaker
-from antlion.controllers.relay_lib.sony_xb20_speaker import SonyXB20Speaker
-from antlion.controllers.relay_lib.tao_tronics_headset import TaoTronicsCarkit
-
-
-class RelayRig:
-    """A group of relay boards and their connected devices.
-
-    This class is also responsible for handling the creation of the relay switch
-    boards, as well as the devices and relays associated with them.
-
-    The boards dict can contain different types of relay boards. They share a
-    common interface through inheriting from RelayBoard. This layer can be
-    ignored by the user.
-
-    The relay devices are stored in a dict of (device_name: device). These
-    device references should be used by the user when they want to directly
-    interface with the relay switches. See RelayDevice or GeneralRelayDevice for
-    implementation.
-
-    """
-    DUPLICATE_ID_ERR_MSG = 'The {} "{}" is not unique. Duplicated in:\n {}'
-
-    # A dict of lambdas that instantiate relay board upon invocation.
-    # The key is the class type name, the value is the lambda.
-    _board_constructors = {
-        'SainSmartBoard':
-        lambda x: SainSmartBoard(x),
-        'RdlRelayBoard':
-        lambda x: RdlRelayBoard(x),
-        'SainSmart8ChannelUsbRelayBoard':
-        lambda x: SainSmart8ChannelUsbRelayBoard(x),
-    }
-
-    # Similar to the dict above, except for devices.
-    _device_constructors = {
-        'GenericRelayDevice': lambda x, rig: GenericRelayDevice(x, rig),
-        'FuguRemote': lambda x, rig: FuguRemote(x, rig),
-        'I6sHeadset': lambda x, rig: I6sHeadset(x, rig),
-        'JaybirdX3Earbuds': lambda x, rig: JaybirdX3Earbuds(x, rig),
-        "LogitechAudioReceiver" :lambda x, rig: LogitechAudioReceiver(x, rig),
-        'SonyXB2Speaker': lambda x, rig: SonyXB2Speaker(x, rig),
-        'SonyXB20Speaker': lambda x, rig: SonyXB20Speaker(x, rig),
-        'TaoTronicsCarkit': lambda x, rig: TaoTronicsCarkit(x, rig),
-        'AkXB10Speaker': lambda x, rig: AkXB10Speaker(x, rig),
-        'SingleButtonDongle': lambda x, rig: SingleButtonDongle(x, rig),
-        'ThreeButtonDongle': lambda x, rig: ThreeButtonDongle(x, rig),
-        'EarstudioReceiver': lambda x, rig: EarstudioReceiver(x, rig),
-        'Headset': lambda x, rig: Headset(x, rig),
-        'Skullcandy': lambda x, rig: Skullcandy(x, rig),
-        'PowerSupply': lambda x, rig: PowerSupply(x, rig),
-    }
-
-    def __init__(self, config):
-        self.relays = dict()
-        self.boards = dict()
-        self.devices = collections.OrderedDict()
-
-        validate_key('boards', config, list, 'relay config file')
-
-        for elem in config['boards']:
-            board = self.create_relay_board(elem)
-            if board.name in self.boards:
-                raise RelayConfigError(
-                    self.DUPLICATE_ID_ERR_MSG.format('name', elem['name'],
-                                                     elem))
-            self.boards[board.name] = board
-
-        # Note: 'boards' is a necessary value, 'devices' is not.
-        if 'devices' in config:
-            for elem in config['devices']:
-                relay_device = self.create_relay_device(elem)
-                if relay_device.name in self.devices:
-                    raise RelayConfigError(
-                        self.DUPLICATE_ID_ERR_MSG.format(
-                            'name', elem['name'], elem))
-                self.devices[relay_device.name] = relay_device
-        else:
-            device_config = dict()
-            device_config['name'] = 'GenericRelayDevice'
-            device_config['relays'] = dict()
-            for relay_id in self.relays:
-                device_config['relays'][relay_id] = relay_id
-            self.devices['device'] = self.create_relay_device(device_config)
-
-    def create_relay_board(self, config):
-        """Builds a RelayBoard from the given config.
-
-        Args:
-            config: An object containing 'type', 'name', 'relays', and
-            (optionally) 'properties'. See the example json file.
-
-        Returns:
-            A RelayBoard with the given type found in the config.
-
-        Raises:
-            RelayConfigError if config['type'] doesn't exist or is not a string.
-
-        """
-        validate_key('type', config, str, '"boards" element')
-        try:
-            ret = self._board_constructors[config['type']](config)
-        except LookupError:
-            raise RelayConfigError(
-                'RelayBoard with type {} not found. Has it been added '
-                'to the _board_constructors dict?'.format(config['type']))
-        for _, relay in ret.relays.items():
-            self.relays[relay.relay_id] = relay
-        return ret
-
-    def create_relay_device(self, config):
-        """Builds a RelayDevice from the given config.
-
-        When given no 'type' key in the config, the function will default to
-        returning a GenericRelayDevice with the relays found in the 'relays'
-        array.
-
-        Args:
-            config: An object containing 'name', 'relays', and (optionally)
-            type.
-
-        Returns:
-            A RelayDevice with the given type found in the config. If no type is
-            found, it will default to GenericRelayDevice.
-
-        Raises:
-            RelayConfigError if the type given does not match any from the
-            _device_constructors dictionary.
-
-        """
-        if 'type' in config:
-            if config['type'] not in RelayRig._device_constructors:
-                raise RelayConfigError(
-                    'Device with type {} not found. Has it been added '
-                    'to the _device_constructors dict?'.format(config['type']))
-            else:
-                device = self._device_constructors[config['type']](config,
-                                                                   self)
-
-        else:
-            device = GenericRelayDevice(config, self)
-
-        return device
diff --git a/src/antlion/controllers/relay_lib/sain_smart_8_channel_usb_relay_board.py b/src/antlion/controllers/relay_lib/sain_smart_8_channel_usb_relay_board.py
deleted file mode 100644
index 33d7f43..0000000
--- a/src/antlion/controllers/relay_lib/sain_smart_8_channel_usb_relay_board.py
+++ /dev/null
@@ -1,83 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.controllers.relay_lib.relay import RelayState
-from antlion.controllers.relay_lib.usb_relay_board_base import UsbRelayBoardBase
-from pylibftdi import BitBangDevice
-""" This library is to control the sainsmart board.
-
-Device:
-    https://www.sainsmart.com/products/8-channel-12v-usb-relay-module
-
-Additional setup steps:
-Change out pip/pip3 and python2.7/3.4 based on python version
-1. pip install pylibftdi
-2. pip install libusb1
-3. sudo apt-get install libftdi-dev
-4. Make this file /etc/udev/rules.d/99-libftdi.rules with root and add the lines below:
-SUBSYSTEMS=="usb", ATTRS{idVendor}=="0403", ATTRS{idProduct}=="6001", GROUP="plugdev", MODE="0660"
-SUBSYSTEMS=="usb", ATTRS{idVendor}=="0403", ATTRS{idProduct}=="6014", GROUP="plugdev", MODE="0660"
-5. Connect USB relay to computer and power board with necessary connectors
-6. Verify device is found by: python -m pylibftdi.examples.list_devices
-6a. Example output: FTDI:FT245R USB FIFO:A9079L5D
-7. The FIFO value is going to be your device name in the config
-8. Your config should look something like this (note FIFO name is used here):
-
-{
-    "_description": "This is an example skeleton of a ficticious relay.",
-    "testbed": [{
-        "_description": "A testbed with one relay",
-        "name": "relay_test",
-        "RelayDevice": {
-            "boards": [{
-                "type": "SainSmart8ChannelUsbRelayBoard",
-                "name": "ttyUSB0",
-                "device": "A9079L5D"
-            }],
-            "devices": [{
-                "type": "SingleButtonDongle",
-                "name": "aukey",
-                "mac_address": "e9:08:ef:2b:47:a1",
-                "relays": {
-                    "Action": "ttyUSB0/1"
-                }
-
-            }]
-        }
-    }],
-    "logpath": "/tmp/logs",
-    "testpaths": ["../tests"]
-}
-"""
-
-
-class SainSmart8ChannelUsbRelayBoard(UsbRelayBoardBase):
-    def set(self, relay_position, value):
-        """Returns the current status of the passed in relay.
-
-        Note that this board acts in reverse of normal relays.
-        EG: NO = NC and NC = NO
-
-        Args:
-            relay_position: Relay position.
-            value: Turn_on or Turn_off the relay for the given relay_position.
-        """
-        with BitBangDevice(self.device) as bb:
-            if value == RelayState.NO:
-                bb.port &= ~(self.address[relay_position])
-            else:
-                bb.port |= self.address[relay_position]
-        self.status_dict[relay_position] = value
diff --git a/src/antlion/controllers/relay_lib/sain_smart_board.py b/src/antlion/controllers/relay_lib/sain_smart_board.py
deleted file mode 100644
index b5bc310..0000000
--- a/src/antlion/controllers/relay_lib/sain_smart_board.py
+++ /dev/null
@@ -1,134 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import re
-from urllib.request import urlopen
-
-from antlion.controllers.relay_lib.errors import RelayDeviceConnectionError
-from antlion.controllers.relay_lib.helpers import validate_key
-from antlion.controllers.relay_lib.relay import RelayState
-from antlion.controllers.relay_lib.relay_board import RelayBoard
-
-BASE_URL = 'http://192.168.1.4/30000/'
-
-
-class SainSmartBoard(RelayBoard):
-    """Controls and queries SainSmart Web Relay Board.
-
-    Controls and queries SainSmart Web Relay Board, found here:
-    http://www.sainsmart.com/sainsmart-rj45-tcp-ip-remote-controller-board-with-8-channels-relay-integrated.html
-    this uses a web interface to toggle relays.
-
-    There is an unmentioned hidden status page that can be found at <root>/99/.
-    """
-
-    # No longer used. Here for debugging purposes.
-    #
-    # Old status pages. Used before base_url/99 was found.
-    # STATUS_1 = '40'
-    # STATUS_2 = '43'
-    #
-    # This is the regex used to parse the old status pages:
-    # r'y-\d(?P<relay>\d).+?> (?:&nbsp)?(?P<status>.*?)&'
-    #
-    # Pages that will turn all switches on or off, even the ghost switches.
-    # ALL_RELAY_OFF = '44'
-    # ALL_RELAY_ON = '45'
-
-    HIDDEN_STATUS_PAGE = '99'
-
-    VALID_RELAY_POSITIONS = [0, 1, 2, 3, 4, 5, 6, 7]
-    NUM_RELAYS = 8
-
-    def __init__(self, config):
-        # This will be lazy loaded
-        self.status_dict = None
-        self.base_url = validate_key('base_url', config, str, 'config')
-        if not self.base_url.endswith('/'):
-            self.base_url += '/'
-        super(SainSmartBoard, self).__init__(config)
-
-    def get_relay_position_list(self):
-        return self.VALID_RELAY_POSITIONS
-
-    def _load_page(self, relative_url):
-        """Loads a web page at self.base_url + relative_url.
-
-        Properly opens and closes the web page.
-
-        Args:
-            relative_url: The string appended to the base_url.
-
-        Returns:
-            the contents of the web page.
-
-        Raises:
-            A RelayDeviceConnectionError is raised if the page cannot be loaded.
-
-        """
-        try:
-            page = urlopen(self.base_url + relative_url)
-            result = page.read().decode('utf-8')
-            page.close()
-        except IOError:
-            raise RelayDeviceConnectionError(
-                'Unable to connect to board "{}" through {}'.format(
-                    self.name, self.base_url + relative_url))
-        return result
-
-    def _sync_status_dict(self):
-        """Returns a dictionary of relays and there current state."""
-        result = self._load_page(self.HIDDEN_STATUS_PAGE)
-        if 'TUX' not in result:
-            raise RelayDeviceConnectionError(
-                'Sainsmart board with URL %s has not completed initialization '
-                'after its IP was set, and must be power-cycled to prevent '
-                'random disconnections. After power-cycling, make sure %s/%s '
-                'has TUX appear in its output.' %
-                (self.base_url, self.base_url, self.HIDDEN_STATUS_PAGE))
-        status_string = re.search(r'">([01]*)TUX', result).group(1)
-
-        self.status_dict = {}
-        for index, char in enumerate(status_string):
-            self.status_dict[index] = (
-                RelayState.NC if char == '1' else RelayState.NO)
-
-    def _print_status(self):
-        """Prints out the list of relays and their current state."""
-        for i in range(0, 8):
-            print('Relay {}: {}'.format(i, self.status_dict[i]))
-
-    def get_relay_status(self, relay_position):
-        """Returns the current status of the passed in relay."""
-        if self.status_dict is None:
-            self._sync_status_dict()
-        return self.status_dict[relay_position]
-
-    def set(self, relay_position, value):
-        """Sets the given relay to be either ON or OFF, indicated by value."""
-        if self.status_dict is None:
-            self._sync_status_dict()
-        self._load_page(self._get_relay_url_code(relay_position, value))
-        self.status_dict[relay_position] = value
-
-    @staticmethod
-    def _get_relay_url_code(relay_position, no_or_nc):
-        """Returns the two digit code corresponding to setting the relay."""
-        if no_or_nc == RelayState.NC:
-            on_modifier = 1
-        else:
-            on_modifier = 0
-        return '{:02d}'.format(relay_position * 2 + on_modifier)
diff --git a/src/antlion/controllers/relay_lib/skullcandy.py b/src/antlion/controllers/relay_lib/skullcandy.py
deleted file mode 100644
index 078bbfd..0000000
--- a/src/antlion/controllers/relay_lib/skullcandy.py
+++ /dev/null
@@ -1,56 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import enum
-import time
-
-from antlion.controllers.relay_lib.devices.bluetooth_relay_device import BluetoothRelayDevice
-
-PAIRING_MODE_WAIT_TIME = 5
-POWER_TOGGLE_WAIT_TIME = 1
-
-
-class Buttons(enum.Enum):
-    POWER = 'Power'
-
-
-class Skullcandy(BluetoothRelayDevice):
-    """Skullcandy Bluetooth Speaker model
-
-    Wraps the button presses, as well as the special features like pairing.
-    """
-
-    def __init__(self, config, relay_rig):
-        BluetoothRelayDevice.__init__(self, config, relay_rig)
-        self._ensure_config_contains_relays(button.value for button in Buttons)
-
-    def _hold_button(self, button, seconds):
-        self.hold_down(button.value)
-        time.sleep(seconds)
-        self.release(button.value)
-
-    def power_off(self):
-        self._hold_button(Buttons.POWER, POWER_TOGGLE_WAIT_TIME)
-
-    def turn_power_on_and_enter_pairing_mode(self):
-        self._hold_button(Buttons.POWER, PAIRING_MODE_WAIT_TIME)
-
-    def setup(self):
-        """Sets all relays to their default state (off)."""
-        BluetoothRelayDevice.setup(self)
-
-    def clean_up(self):
-        """Sets all relays to their default state (off)."""
-        BluetoothRelayDevice.clean_up(self)
diff --git a/src/antlion/controllers/relay_lib/sony_xb20_speaker.py b/src/antlion/controllers/relay_lib/sony_xb20_speaker.py
deleted file mode 100644
index 942a812..0000000
--- a/src/antlion/controllers/relay_lib/sony_xb20_speaker.py
+++ /dev/null
@@ -1,59 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import enum
-import time
-
-from antlion.controllers.relay_lib.devices.bluetooth_relay_device import BluetoothRelayDevice
-
-PAIRING_MODE_WAIT_TIME = 6
-POWER_TOGGLE_WAIT_TIME = 1
-
-
-class Buttons(enum.Enum):
-    POWER = 'Power'
-
-
-class SonyXB20Speaker(BluetoothRelayDevice):
-    """Sony XB20 Bluetooth Speaker model
-
-    Wraps the button presses, as well as the special features like pairing.
-    """
-
-    def __init__(self, config, relay_rig):
-        BluetoothRelayDevice.__init__(self, config, relay_rig)
-        self._ensure_config_contains_relays(button.value for button in Buttons)
-
-    def _hold_button(self, button, seconds):
-        self.hold_down(button.value)
-        time.sleep(seconds)
-        self.release(button.value)
-
-    def power_on(self):
-        self._hold_button(Buttons.POWER, POWER_TOGGLE_WAIT_TIME)
-
-    def power_off(self):
-        self._hold_button(Buttons.POWER, POWER_TOGGLE_WAIT_TIME)
-
-    def enter_pairing_mode(self):
-        self._hold_button(Buttons.POWER, PAIRING_MODE_WAIT_TIME)
-
-    def setup(self):
-        """Sets all relays to their default state (off)."""
-        BluetoothRelayDevice.setup(self)
-
-    def clean_up(self):
-        """Sets all relays to their default state (off)."""
-        BluetoothRelayDevice.clean_up(self)
diff --git a/src/antlion/controllers/relay_lib/sony_xb2_speaker.py b/src/antlion/controllers/relay_lib/sony_xb2_speaker.py
deleted file mode 100644
index 9c97c35..0000000
--- a/src/antlion/controllers/relay_lib/sony_xb2_speaker.py
+++ /dev/null
@@ -1,61 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import enum
-import time
-
-from antlion.controllers.relay_lib.devices.bluetooth_relay_device import BluetoothRelayDevice
-
-PAIRING_MODE_WAIT_TIME = 5
-POWER_ON_WAIT_TIME = 2
-POWER_OFF_WAIT_TIME = 6
-
-
-class Buttons(enum.Enum):
-    POWER = 'Power'
-    PAIR = 'Pair'
-
-
-class SonyXB2Speaker(BluetoothRelayDevice):
-    """Sony XB2 Bluetooth Speaker model
-
-    Wraps the button presses, as well as the special features like pairing.
-    """
-
-    def __init__(self, config, relay_rig):
-        BluetoothRelayDevice.__init__(self, config, relay_rig)
-        self._ensure_config_contains_relays(button.value for button in Buttons)
-
-    def _hold_button(self, button, seconds):
-        self.hold_down(button.value)
-        time.sleep(seconds)
-        self.release(button.value)
-
-    def power_on(self):
-        self._hold_button(Buttons.POWER, POWER_ON_WAIT_TIME)
-
-    def power_off(self):
-        self._hold_button(Buttons.POWER, POWER_OFF_WAIT_TIME)
-
-    def enter_pairing_mode(self):
-        self._hold_button(Buttons.PAIR, PAIRING_MODE_WAIT_TIME)
-
-    def setup(self):
-        """Sets all relays to their default state (off)."""
-        BluetoothRelayDevice.setup(self)
-
-    def clean_up(self):
-        """Sets all relays to their default state (off)."""
-        BluetoothRelayDevice.clean_up(self)
diff --git a/src/antlion/controllers/relay_lib/tao_tronics_headset.py b/src/antlion/controllers/relay_lib/tao_tronics_headset.py
deleted file mode 100644
index 88bb61f..0000000
--- a/src/antlion/controllers/relay_lib/tao_tronics_headset.py
+++ /dev/null
@@ -1,77 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import enum
-import time
-
-from antlion.controllers.relay_lib.devices.bluetooth_relay_device import BluetoothRelayDevice
-
-WAIT_TIME = 0.05
-
-
-class Buttons(enum.Enum):
-    NEXT = 'Next'
-    PREVIOUS = "Previous"
-    PLAY_PAUSE = 'Play_pause'
-    VOLUME_UP = "Volume_up"
-    VOLUME_DOWN = "Volume_down"
-
-
-class TaoTronicsCarkit(BluetoothRelayDevice):
-
-    def __init__(self, config, relay_rig):
-        BluetoothRelayDevice.__init__(self, config, relay_rig)
-        self._ensure_config_contains_relays(button.value for button in Buttons)
-
-    def setup(self):
-        """Sets all relays to their default state (off)."""
-        BluetoothRelayDevice.setup(self)
-
-    def press_play_pause(self):
-        """
-        Sets relay to
-            Play state : if there is no A2DP_streaming.
-            Pause state : if there is A2DP_streaming.
-        """
-        self.relays[Buttons.PLAY_PAUSE.value].set_no_for(WAIT_TIME)
-
-    def press_next(self):
-        """Skips to next song from relay_device."""
-        self.relays[Buttons.NEXT.value].set_no_for(WAIT_TIME)
-
-    def press_previous(self):
-        """Skips to previous song from relay_device."""
-        self.relays[Buttons.PREVIOUS.value].set_no_for(WAIT_TIME)
-
-    def press_volume_up(self):
-        """Increases volume from relay_device."""
-        self.relays[Buttons.VOLUME_UP.value].set_no_for(WAIT_TIME)
-
-    def press_volume_down(self):
-        """Decreases volume from relay_device."""
-        self.relays[Buttons.VOLUME_DOWN.value].set_no_for(WAIT_TIME)
-
-    def press_initiate_call(self):
-        """Initiate call from relay device."""
-        for i in range(0, 2):
-            self.press(Buttons.PLAY_PAUSE.value)
-            time.sleep(0.2)
-        return True
-
-    def press_accept_call(self):
-        """Accepts call from relay device."""
-        self.press(Buttons.PLAY_PAUSE.value)
-        return True
diff --git a/src/antlion/controllers/relay_lib/usb_relay_board_base.py b/src/antlion/controllers/relay_lib/usb_relay_board_base.py
deleted file mode 100644
index 45422eb..0000000
--- a/src/antlion/controllers/relay_lib/usb_relay_board_base.py
+++ /dev/null
@@ -1,107 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.controllers.relay_lib.relay import RelayState
-from antlion.controllers.relay_lib.relay_board import RelayBoard
-from pylibftdi import BitBangDevice
-
-
-class UsbRelayBoardBase(RelayBoard):
-
-    VALID_RELAY_POSITIONS = [1, 2, 3, 4, 5, 6, 7, 8]
-    NUM_RELAYS = 8
-
-    def __init__(self, config):
-        self.status_dict = dict()
-        self.device = config["device"]
-        super(UsbRelayBoardBase, self).__init__(config)
-        self.address = {
-            1: 0x1,
-            2: 0x2,
-            3: 0x4,
-            4: 0x8,
-            5: 0x10,
-            6: 0x20,
-            7: 0x40,
-            8: 0x80,
-            "select_all": 0xFF
-        }
-
-    def get_relay_position_list(self):
-        return self.VALID_RELAY_POSITIONS
-
-    def test_bit(self, int_type, offset):
-        """Function to get status for the given relay position.
-
-        Args:
-            int_type: Port value for given relay.
-            offset: offset for given Relay_position.
-
-        Returns:
-            returns current status for given relay_position.
-        """
-        mask = 1 << offset
-        return (int_type & mask)
-
-    def _get_relay_state(self, data, relay):
-        """Function to get status for the given relay position.
-
-        Args:
-            data: Port value for given relay.
-            relay: Relay_position.
-
-        Returns:
-            returns current status for given relay_position.
-        """
-        if relay == 1:
-            return self.test_bit(data, 1)
-        if relay == 2:
-            return self.test_bit(data, 3)
-        if relay == 3:
-            return self.test_bit(data, 5)
-        if relay == 4:
-            return self.test_bit(data, 7)
-        if relay == 5:
-            return self.test_bit(data, 2)
-        if relay == 6:
-            return self.test_bit(data, 4)
-        if relay == 7:
-            return self.test_bit(data, 6)
-        if relay == 8:
-            return self.test_bit(data, 8)
-
-    def get_relay_status(self, relay_position):
-        """Get relay status for the given relay position.
-
-        Args:
-            relay_position: Status for given Relay position.
-
-        Returns:
-            returns current status for given relay_position.
-        """
-        with BitBangDevice(self.device) as bb:
-            self.status_dict[relay_position] = self._get_relay_state(
-                bb.port, relay_position)
-        return self.status_dict[relay_position]
-
-    def set(self, relay_position, value):
-        """Returns the current status of the passed in relay.
-
-        Args:
-            relay_position: Relay position.
-            value: Turn_on or Turn_off the relay for the given relay_position.
-        """
-        raise NotImplementedError
diff --git a/src/antlion/controllers/rohdeschwarz_lib/OWNERS b/src/antlion/controllers/rohdeschwarz_lib/OWNERS
deleted file mode 100644
index e4010df..0000000
--- a/src/antlion/controllers/rohdeschwarz_lib/OWNERS
+++ /dev/null
@@ -1,4 +0,0 @@
-iguarna@google.com
-chaoyangf@google.com
-yixiang@google.com
-codycaldwell@google.com
\ No newline at end of file
diff --git a/src/antlion/controllers/rohdeschwarz_lib/__init__.py b/src/antlion/controllers/rohdeschwarz_lib/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/controllers/rohdeschwarz_lib/__init__.py
+++ /dev/null
diff --git a/src/antlion/controllers/rohdeschwarz_lib/cmw500.py b/src/antlion/controllers/rohdeschwarz_lib/cmw500.py
deleted file mode 100644
index 978600d..0000000
--- a/src/antlion/controllers/rohdeschwarz_lib/cmw500.py
+++ /dev/null
@@ -1,1167 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import time
-
-from enum import Enum
-
-from antlion.controllers import abstract_inst
-
-LTE_ATTACH_RESP = 'ATT'
-LTE_CONN_RESP = 'CONN'
-LTE_IDLE_RESP = 'IDLE'
-LTE_PSWITCHED_ON_RESP = 'ON'
-LTE_PSWITCHED_OFF_RESP = 'OFF'
-
-STATE_CHANGE_TIMEOUT = 20
-
-
-class LteState(Enum):
-    """LTE ON and OFF"""
-    LTE_ON = 'ON'
-    LTE_OFF = 'OFF'
-
-
-class BtsNumber(Enum):
-    """Base station Identifiers."""
-    BTS1 = 'PCC'
-    BTS2 = 'SCC1'
-    BTS3 = 'SCC2'
-    BTS4 = 'SCC3'
-    BTS5 = 'SCC4'
-    BTS6 = 'SCC6'
-    BTS7 = 'SCC7'
-
-
-class LteBandwidth(Enum):
-    """Supported LTE bandwidths."""
-    BANDWIDTH_1MHz = 'B014'
-    BANDWIDTH_3MHz = 'B030'
-    BANDWIDTH_5MHz = 'B050'
-    BANDWIDTH_10MHz = 'B100'
-    BANDWIDTH_15MHz = 'B150'
-    BANDWIDTH_20MHz = 'B200'
-
-
-class DuplexMode(Enum):
-    """Duplex Modes"""
-    FDD = 'FDD'
-    TDD = 'TDD'
-
-
-class SchedulingMode(Enum):
-    """Supported scheduling modes."""
-    RMC = 'RMC'
-    USERDEFINEDCH = 'UDCHannels'
-
-
-class TransmissionModes(Enum):
-    """Supported transmission modes."""
-    TM1 = 'TM1'
-    TM2 = 'TM2'
-    TM3 = 'TM3'
-    TM4 = 'TM4'
-    TM7 = 'TM7'
-    TM8 = 'TM8'
-    TM9 = 'TM9'
-
-
-class UseCarrierSpecific(Enum):
-    """Enable or disable carrier specific."""
-    UCS_ON = 'ON'
-    UCS_OFF = 'OFF'
-
-
-class RbPosition(Enum):
-    """Supported RB positions."""
-    LOW = 'LOW'
-    HIGH = 'HIGH'
-    P5 = 'P5'
-    P10 = 'P10'
-    P23 = 'P23'
-    P35 = 'P35'
-    P48 = 'P48'
-
-
-class ModulationType(Enum):
-    """Supported Modulation Types."""
-    QPSK = 'QPSK'
-    Q16 = 'Q16'
-    Q64 = 'Q64'
-    Q256 = 'Q256'
-
-
-class DciFormat(Enum):
-    """Support DCI Formats for MIMOs"""
-    D1 = 'D1'
-    D1A = 'D1A'
-    D1B = 'D1B'
-    D2 = 'D2'
-    D2A = 'D2A'
-    D2B = 'D2B'
-    D2C = 'D2C'
-
-
-class MimoModes(Enum):
-    """MIMO Modes dl antennas"""
-    MIMO1x1 = 'ONE'
-    MIMO2x2 = 'TWO'
-    MIMO4x4 = 'FOUR'
-
-
-class MimoScenario(Enum):
-    """Supported mimo scenarios"""
-    SCEN1x1 = 'SCELl:FLEXible SUA1,RF1C,RX1,RF1C,TX1'
-    SCEN2x2 = 'TRO:FLEXible SUA1,RF1C,RX1,RF1C,TX1,RF3C,TX2'
-    SCEN4x4 = 'FRO FLEXible SUA1,RF1C,RX1,RF1C,TX1,RF3C,TX2,RF2C,TX3,RF4C,TX4'
-
-
-class RrcState(Enum):
-    """States to enable/disable rrc."""
-    RRC_ON = 'ON'
-    RRC_OFF = 'OFF'
-
-
-class MacPadding(Enum):
-    """Enables/Disables Mac Padding."""
-    ON = 'ON'
-    OFF = 'OFF'
-
-
-class ConnectionType(Enum):
-    """Supported Connection Types."""
-    TEST = 'TESTmode'
-    DAU = 'DAPPlication'
-
-
-class RepetitionMode(Enum):
-    """Specifies LTE Measurement Repetition Mode."""
-    SINGLESHOT = 'SINGleshot'
-    CONTINUOUS = 'CONTinuous'
-
-
-class TpcPowerControl(Enum):
-    """Specifies Up Link power control types."""
-    MIN_POWER = 'MINPower'
-    MAX_POWER = 'MAXPower'
-    CONSTANT = 'CONStant'
-    SINGLE = 'SINGle'
-    UDSINGLE = 'UDSingle'
-    UDCONTINUOUS = 'UDContinuous'
-    ALTERNATE = 'ALT0'
-    CLOSED_LOOP = 'CLOop'
-    RP_CONTROL = 'RPControl'
-    FLEX_POWER = 'FULPower'
-
-
-class ReducedPdcch(Enum):
-    """Enables/disables the reduction of PDCCH resources."""
-    ON = 'ON'
-    OFF = 'OFF'
-
-
-class Cmw500(abstract_inst.SocketInstrument):
-
-    def __init__(self, ip_addr, port):
-        """Init method to setup variables for controllers.
-
-        Args:
-              ip_addr: Controller's ip address.
-              port: Port
-        """
-        super(Cmw500, self).__init__(ip_addr, port)
-        self._connect_socket()
-        self._send('*CLS')
-        self._send('*ESE 0;*SRE 0')
-        self._send('*CLS')
-        self._send('*ESE 1;*SRE 4')
-        self._send('SYST:DISP:UPD ON')
-
-    def switch_lte_signalling(self, state):
-        """ Turns LTE signalling ON/OFF.
-
-        Args:
-              state: an instance of LteState indicating the state to which LTE
-                signal has to be set.
-        """
-        if not isinstance(state, LteState):
-            raise ValueError('state should be the instance of LteState.')
-
-        state = state.value
-
-        cmd = 'SOURce:LTE:SIGN:CELL:STATe {}'.format(state)
-        self.send_and_recv(cmd)
-
-        time_elapsed = 0
-        while time_elapsed < STATE_CHANGE_TIMEOUT:
-            response = self.send_and_recv('SOURce:LTE:SIGN:CELL:STATe:ALL?')
-
-            if response == state + ',ADJ':
-                self._logger.info('LTE signalling is now {}.'.format(state))
-                break
-
-            # Wait for a second and increase time count by one
-            time.sleep(1)
-            time_elapsed += 1
-        else:
-            raise CmwError('Failed to turn {} LTE signalling.'.format(state))
-
-    def enable_packet_switching(self):
-        """Enable packet switching in call box."""
-        self.send_and_recv('CALL:LTE:SIGN:PSWitched:ACTion CONNect')
-        self.wait_for_pswitched_state()
-
-    def disable_packet_switching(self):
-        """Disable packet switching in call box."""
-        self.send_and_recv('CALL:LTE:SIGN:PSWitched:ACTion DISConnect')
-        self.wait_for_pswitched_state()
-
-    @property
-    def use_carrier_specific(self):
-        """Gets current status of carrier specific duplex configuration."""
-        return self.send_and_recv('CONFigure:LTE:SIGN:DMODe:UCSPECific?')
-
-    @use_carrier_specific.setter
-    def use_carrier_specific(self, state):
-        """Sets the carrier specific duplex configuration.
-
-        Args:
-            state: ON/OFF UCS configuration.
-        """
-        cmd = 'CONFigure:LTE:SIGN:DMODe:UCSPECific {}'.format(state)
-        self.send_and_recv(cmd)
-
-    def send_and_recv(self, cmd):
-        """Send and recv the status of the command.
-
-        Args:
-            cmd: Command to send.
-
-        Returns:
-            status: returns the status of the command sent.
-        """
-
-        self._send(cmd)
-        if '?' in cmd:
-            status = self._recv()
-            return status
-
-    def configure_mimo_settings(self, mimo):
-        """Sets the mimo scenario for the test.
-
-        Args:
-            mimo: mimo scenario to set.
-        """
-        cmd = 'ROUTe:LTE:SIGN:SCENario:{}'.format(mimo.value)
-        self.send_and_recv(cmd)
-
-    def wait_for_pswitched_state(self, timeout=10):
-        """Wait until pswitched state.
-
-        Args:
-            timeout: timeout for lte pswitched state.
-
-        Raises:
-            CmwError on timeout.
-        """
-        while timeout > 0:
-            state = self.send_and_recv('FETCh:LTE:SIGN:PSWitched:STATe?')
-            if state == LTE_PSWITCHED_ON_RESP:
-                self._logger.debug('Connection to setup initiated.')
-                break
-            elif state == LTE_PSWITCHED_OFF_RESP:
-                self._logger.debug('Connection to setup detached.')
-                break
-
-            # Wait for a second and decrease count by one
-            time.sleep(1)
-            timeout -= 1
-        else:
-            raise CmwError('Failure in setting up/detaching connection')
-
-    def wait_for_attached_state(self, timeout=120):
-        """Attach the controller with device.
-
-        Args:
-            timeout: timeout for phone to get attached.
-
-        Raises:
-            CmwError on time out.
-        """
-        while timeout > 0:
-            state = self.send_and_recv('FETCh:LTE:SIGN:PSWitched:STATe?')
-
-            if state == LTE_ATTACH_RESP:
-                self._logger.debug('Call box attached with device')
-                break
-
-            # Wait for a second and decrease count by one
-            time.sleep(1)
-            timeout -= 1
-        else:
-            raise CmwError('Device could not be attached')
-
-    def wait_for_rrc_state(self, state, timeout=120):
-        """ Waits until a certain RRC state is set.
-
-        Args:
-            state: the RRC state that is being waited for.
-            timeout: timeout for phone to be in connected state.
-
-        Raises:
-            CmwError on time out.
-        """
-        if state not in [LTE_CONN_RESP, LTE_IDLE_RESP]:
-            raise ValueError(
-                'The allowed values for state are {} and {}.'.format(
-                    LTE_CONN_RESP, LTE_IDLE_RESP))
-
-        while timeout > 0:
-            new_state = self.send_and_recv('SENSe:LTE:SIGN:RRCState?')
-
-            if new_state == state:
-                self._logger.debug('The RRC state is {}.'.format(new_state))
-                break
-
-            # Wait for a second and decrease count by one
-            time.sleep(1)
-            timeout -= 1
-        else:
-            raise CmwError('Timeout before RRC state was {}.'.format(state))
-
-    def reset(self):
-        """System level reset"""
-        self.send_and_recv('*RST; *OPC')
-
-    @property
-    def get_instrument_id(self):
-        """Gets instrument identification number"""
-        return self.send_and_recv('*IDN?')
-
-    def disconnect(self):
-        """Disconnect controller from device and switch to local mode."""
-        self.switch_lte_signalling(LteState.LTE_OFF)
-        self.close_remote_mode()
-        self._close_socket()
-
-    def close_remote_mode(self):
-        """Exits remote mode to local mode."""
-        self.send_and_recv('&GTL')
-
-    def detach(self):
-        """Detach callbox and controller."""
-        self.send_and_recv('CALL:LTE:SIGN:PSWitched:ACTion DETach')
-
-    @property
-    def rrc_connection(self):
-        """Gets the RRC connection state."""
-        return self.send_and_recv('CONFigure:LTE:SIGN:CONNection:KRRC?')
-
-    @rrc_connection.setter
-    def rrc_connection(self, state):
-        """Selects whether the RRC connection is kept or released after attach.
-
-        Args:
-            mode: RRC State ON/OFF.
-        """
-        if not isinstance(state, RrcState):
-            raise ValueError('state should be the instance of RrcState.')
-
-        cmd = 'CONFigure:LTE:SIGN:CONNection:KRRC {}'.format(state.value)
-        self.send_and_recv(cmd)
-
-    @property
-    def rrc_connection_timer(self):
-        """Gets the inactivity timeout for disabled rrc connection."""
-        return self.send_and_recv('CONFigure:LTE:SIGN:CONNection:RITimer?')
-
-    @rrc_connection_timer.setter
-    def rrc_connection_timer(self, time_in_secs):
-        """Sets the inactivity timeout for disabled rrc connection. By default
-        the timeout is set to 5.
-
-        Args:
-            time_in_secs: timeout of inactivity in rrc connection.
-        """
-        cmd = 'CONFigure:LTE:SIGN:CONNection:RITimer {}'.format(time_in_secs)
-        self.send_and_recv(cmd)
-
-    @property
-    def dl_mac_padding(self):
-        """Gets the state of mac padding."""
-        return self.send_and_recv('CONFigure:LTE:SIGN:CONNection:DLPadding?')
-
-    @dl_mac_padding.setter
-    def dl_mac_padding(self, state):
-        """Enables/Disables downlink padding at the mac layer.
-
-        Args:
-            state: ON/OFF
-        """
-        cmd = 'CONFigure:LTE:SIGN:CONNection:DLPadding {}'.format(state.value)
-        self.send_and_recv(cmd)
-
-    @property
-    def connection_type(self):
-        """Gets the connection type applied in callbox."""
-        return self.send_and_recv('CONFigure:LTE:SIGN:CONNection:CTYPe?')
-
-    @connection_type.setter
-    def connection_type(self, ctype):
-        """Sets the connection type to be applied.
-
-        Args:
-            ctype: Connection type.
-        """
-        cmd = 'CONFigure:LTE:SIGN:CONNection:CTYPe {}'.format(ctype.value)
-        self.send_and_recv(cmd)
-
-    def get_base_station(self, bts_num=BtsNumber.BTS1):
-        """Gets the base station object based on bts num. By default
-        bts_num set to PCC
-
-        Args:
-            bts_num: base station identifier
-
-        Returns:
-            base station object.
-        """
-        return BaseStation(self, bts_num)
-
-    def init_lte_measurement(self):
-        """Gets the class object for lte measurement which can be used to
-        initiate measurements.
-
-        Returns:
-            lte measurement object.
-        """
-        return LteMeasurement(self)
-
-
-class BaseStation(object):
-    """Class to interact with different base stations"""
-
-    def __init__(self, cmw, bts_num):
-        if not isinstance(bts_num, BtsNumber):
-            raise ValueError('bts_num should be an instance of BtsNumber.')
-        self._bts = bts_num.value
-        self._cmw = cmw
-
-    @property
-    def duplex_mode(self):
-        """Gets current duplex of cell."""
-        cmd = 'CONFigure:LTE:SIGN:{}:DMODe?'.format(self._bts)
-        return self._cmw.send_and_recv(cmd)
-
-    @duplex_mode.setter
-    def duplex_mode(self, mode):
-        """Sets the Duplex mode of cell.
-
-        Args:
-            mode: String indicating FDD or TDD.
-        """
-        if not isinstance(mode, DuplexMode):
-            raise ValueError('mode should be an instance of DuplexMode.')
-
-        cmd = 'CONFigure:LTE:SIGN:{}:DMODe {}'.format(self._bts, mode.value)
-        self._cmw.send_and_recv(cmd)
-
-    @property
-    def band(self):
-        """Gets the current band of cell."""
-        cmd = 'CONFigure:LTE:SIGN:{}:BAND?'.format(self._bts)
-        return self._cmw.send_and_recv(cmd)
-
-    @band.setter
-    def band(self, band):
-        """Sets the Band of cell.
-
-        Args:
-            band: band of cell.
-        """
-        cmd = 'CONFigure:LTE:SIGN:{}:BAND {}'.format(self._bts, band)
-        self._cmw.send_and_recv(cmd)
-
-    @property
-    def dl_channel(self):
-        """Gets the downlink channel of cell."""
-        cmd = 'CONFigure:LTE:SIGN:RFSettings:{}:CHANnel:DL?'.format(self._bts)
-        return self._cmw.send_and_recv(cmd)
-
-    @dl_channel.setter
-    def dl_channel(self, channel):
-        """Sets the downlink channel number of cell.
-
-        Args:
-            channel: downlink channel number of cell.
-        """
-        cmd = 'CONFigure:LTE:SIGN:RFSettings:{}:CHANnel:DL {}'.format(
-            self._bts, channel)
-        self._cmw.send_and_recv(cmd)
-
-    @property
-    def ul_channel(self):
-        """Gets the uplink channel of cell."""
-        cmd = 'CONFigure:LTE:SIGN:RFSettings:{}:CHANnel:UL?'.format(self._bts)
-        return self._cmw.send_and_recv(cmd)
-
-    @ul_channel.setter
-    def ul_channel(self, channel):
-        """Sets the up link channel number of cell.
-
-        Args:
-            channel: up link channel number of cell.
-        """
-        cmd = 'CONFigure:LTE:SIGN:RFSettings:{}:CHANnel:UL {}'.format(
-            self._bts, channel)
-        self._cmw.send_and_recv(cmd)
-
-    @property
-    def bandwidth(self):
-        """Get the channel bandwidth of the cell."""
-        cmd = 'CONFigure:LTE:SIGN:CELL:BANDwidth:{}:DL?'.format(self._bts)
-        return self._cmw.send_and_recv(cmd)
-
-    @bandwidth.setter
-    def bandwidth(self, bandwidth):
-        """Sets the channel bandwidth of the cell.
-
-        Args:
-            bandwidth: channel bandwidth of cell.
-        """
-        if not isinstance(bandwidth, LteBandwidth):
-            raise ValueError('bandwidth should be an instance of '
-                             'LteBandwidth.')
-        cmd = 'CONFigure:LTE:SIGN:CELL:BANDwidth:{}:DL {}'.format(
-            self._bts, bandwidth.value)
-        self._cmw.send_and_recv(cmd)
-
-    @property
-    def ul_frequency(self):
-        """Get the uplink frequency of the cell."""
-        cmd = 'CONFigure:LTE:SIGN:RFSettings:{}:CHANnel:UL? MHZ'.format(
-            self._bts)
-        return self._cmw.send_and_recv(cmd)
-
-    @ul_frequency.setter
-    def ul_frequency(self, freq):
-        """Get the uplink frequency of the cell.
-
-        Args:
-            freq: uplink frequency of the cell.
-        """
-        cmd = 'CONFigure:LTE:SIGN:RFSettings:{}:CHANnel:UL {} MHZ'.format(
-            self._bts, freq)
-        self._cmw.send_and_recv(cmd)
-
-    @property
-    def dl_frequency(self):
-        """Get the downlink frequency of the cell"""
-        cmd = 'CONFigure:LTE:SIGN:RFSettings:{}:CHANnel:DL? MHZ'.format(
-            self._bts)
-        return self._cmw.send_and_recv(cmd)
-
-    @dl_frequency.setter
-    def dl_frequency(self, freq):
-        """Get the downlink frequency of the cell.
-
-        Args:
-            freq: downlink frequency of the cell.
-        """
-        cmd = 'CONFigure:LTE:SIGN:RFSettings:{}:CHANnel:DL {} MHZ'.format(
-            self._bts, freq)
-        self._cmw.send_and_recv(cmd)
-
-    @property
-    def transmode(self):
-        """Gets the TM of cell."""
-        cmd = 'CONFigure:LTE:SIGN:CONNection:{}:TRANsmission?'.format(
-            self._bts)
-        return self._cmw.send_and_recv(cmd)
-
-    @transmode.setter
-    def transmode(self, tm_mode):
-        """Sets the TM of cell.
-
-        Args:
-            tm_mode: TM of cell.
-        """
-        if not isinstance(tm_mode, TransmissionModes):
-            raise ValueError('tm_mode should be an instance of '
-                             'Transmission modes.')
-
-        cmd = 'CONFigure:LTE:SIGN:CONNection:{}:TRANsmission {}'.format(
-            self._bts, tm_mode.value)
-        self._cmw.send_and_recv(cmd)
-
-    @property
-    def downlink_power_level(self):
-        """Gets RSPRE level."""
-        cmd = 'CONFigure:LTE:SIGN:DL:{}:RSEPre:LEVel?'.format(self._bts)
-        return self._cmw.send_and_recv(cmd)
-
-    @downlink_power_level.setter
-    def downlink_power_level(self, pwlevel):
-        """Modifies RSPRE level.
-
-        Args:
-            pwlevel: power level in dBm.
-        """
-        cmd = 'CONFigure:LTE:SIGN:DL:{}:RSEPre:LEVel {}'.format(
-            self._bts, pwlevel)
-        self._cmw.send_and_recv(cmd)
-
-    @property
-    def uplink_power_control(self):
-        """Gets open loop nominal power directly."""
-        cmd = 'CONFigure:LTE:SIGN:UL:{}:PUSCh:OLNPower?'.format(self._bts)
-        return self._cmw.send_and_recv(cmd)
-
-    @uplink_power_control.setter
-    def uplink_power_control(self, ul_power):
-        """Sets open loop nominal power directly.
-
-        Args:
-            ul_power: uplink power level.
-        """
-        cmd = 'CONFigure:LTE:SIGN:UL:{}:PUSCh:OLNPower {}'.format(
-            self._bts, ul_power)
-        self._cmw.send_and_recv(cmd)
-
-    @property
-    def uldl_configuration(self):
-        """Gets uldl configuration of the cell."""
-        cmd = 'CONFigure:LTE:SIGN:CELL:{}:ULDL?'.format(self._bts)
-        return self._cmw.send_and_recv(cmd)
-
-    @uldl_configuration.setter
-    def uldl_configuration(self, uldl):
-        """Sets the ul-dl configuration.
-
-        Args:
-            uldl: Configuration value ranging from 0 to 6.
-        """
-        if uldl not in range(0, 7):
-            raise ValueError('uldl configuration value should be between'
-                             ' 0 and 6 inclusive.')
-
-        cmd = 'CONFigure:LTE:SIGN:CELL:{}:ULDL {}'.format(self._bts, uldl)
-        self._cmw.send_and_recv(cmd)
-
-    @property
-    def tdd_special_subframe(self):
-        """Gets special subframe of the cell."""
-        cmd = 'CONFigure:LTE:SIGN:CELL:{}:SSUBframe?'.format(self._bts)
-        return self._cmw.send_and_recv(cmd)
-
-    @tdd_special_subframe.setter
-    def tdd_special_subframe(self, sframe):
-        """Sets the tdd special subframe of the cell.
-
-        Args:
-            sframe: Integer value ranging from 1 to 9.
-        """
-        if sframe not in range(0, 10):
-            raise ValueError('tdd special subframe should be between 0 and 9'
-                             ' inclusive.')
-
-        cmd = 'CONFigure:LTE:SIGN:CELL:{}:SSUBframe {}'.format(
-            self._bts, sframe)
-        self._cmw.send_and_recv(cmd)
-
-    @property
-    def scheduling_mode(self):
-        """Gets the current scheduling mode."""
-        cmd = 'CONFigure:LTE:SIGN:CONNection:{}:STYPe?'.format(self._bts)
-        return self._cmw.send_and_recv(cmd)
-
-    @scheduling_mode.setter
-    def scheduling_mode(self, mode):
-        """Sets the scheduling type for the cell.
-
-        Args:
-            mode: Selects the channel mode to be scheduled.
-        """
-        if not isinstance(mode, SchedulingMode):
-            raise ValueError('mode should be the instance of scheduling mode.')
-
-        cmd = 'CONFigure:LTE:SIGN:CONNection:{}:STYPe {}'.format(
-            self._bts, mode.value)
-        self._cmw.send_and_recv(cmd)
-
-    @property
-    def rb_configuration_dl(self):
-        """Gets rmc's rb configuration for down link. This function returns
-        Number of Resource blocks, Resource block position and Modulation type.
-        """
-        cmd = 'CONFigure:LTE:SIGN:CONNection:{}:{}:DL?'.format(
-            self._bts, self.scheduling_mode)
-        return self._cmw.send_and_recv(cmd)
-
-    @rb_configuration_dl.setter
-    def rb_configuration_dl(self, rb_config):
-        """Sets the rb configuration for down link for scheduling type.
-
-        Args:
-            rb_config: Tuple containing Number of resource blocks, resource
-            block position and modulation type.
-
-        Raises:
-            ValueError: If tuple unpacking fails.
-        """
-        if self.scheduling_mode == 'RMC':
-            rb, rb_pos, modulation = rb_config
-
-            cmd = ('CONFigure:LTE:SIGN:CONNection:{}:RMC:DL {},{},'
-                   '{}'.format(self._bts, rb, rb_pos, modulation))
-            self._cmw.send_and_recv(cmd)
-
-        elif self.scheduling_mode == 'UDCH':
-            rb, start_rb, modulation, tbs = rb_config
-
-            self.validate_rb(rb)
-
-            if not isinstance(modulation, ModulationType):
-                raise ValueError('Modulation should be of type '
-                                 'ModulationType.')
-
-            cmd = ('CONFigure:LTE:SIGN:CONNection:{}:UDCHannels:DL {},{},'
-                   '{},{}'.format(self._bts, rb, start_rb, modulation.value,
-                                  tbs))
-            self._cmw.send_and_recv(cmd)
-
-    @property
-    def rb_configuration_ul(self):
-        """Gets rb configuration for up link. This function returns
-        Number of Resource blocks, Resource block position and Modulation type.
-        """
-        cmd = 'CONFigure:LTE:SIGN:CONNection:{}:{}:UL?'.format(
-            self._bts, self.scheduling_mode)
-        return self._cmw.send_and_recv(cmd)
-
-    @rb_configuration_ul.setter
-    def rb_configuration_ul(self, rb_config):
-        """Sets the rb configuration for down link for scheduling mode.
-
-        Args:
-            rb_config: Tuple containing Number of resource blocks, resource
-            block position and modulation type.
-
-        Raises:
-            ValueError: If tuple unpacking fails.
-        """
-        if self.scheduling_mode == 'RMC':
-            rb, rb_pos, modulation = rb_config
-
-            cmd = ('CONFigure:LTE:SIGN:CONNection:{}:RMC:UL {},{},'
-                   '{}'.format(self._bts, rb, rb_pos, modulation))
-            self._cmw.send_and_recv(cmd)
-
-        elif self.scheduling_mode == 'UDCH':
-            rb, start_rb, modulation, tbs = rb_config
-
-            self.validate_rb(rb)
-
-            if not isinstance(modulation, ModulationType):
-                raise ValueError('Modulation should be of type '
-                                 'ModulationType.')
-            cmd = ('CONFigure:LTE:SIGN:CONNection:{}:UDCHannels:UL {},{},'
-                   '{},{}'.format(self._bts, rb, start_rb, modulation.value,
-                                  tbs))
-            self._cmw.send_and_recv(cmd)
-
-    def validate_rb(self, rb):
-        """Validates if rb is within the limits for bandwidth set.
-
-        Args:
-            rb: No. of resource blocks.
-
-        Raises:
-            ValueError if rb out of range.
-        """
-        bandwidth = self.bandwidth
-
-        if bandwidth == LteBandwidth.BANDWIDTH_1MHz.value:
-            if not 0 <= rb <= 6:
-                raise ValueError('RB should be between 0 to 6 inclusive'
-                                 ' for 1.4Mhz.')
-        elif bandwidth == LteBandwidth.BANDWIDTH_3MHz.value:
-            if not 0 <= rb <= 10:
-                raise ValueError('RB should be between 0 to 10 inclusive'
-                                 ' for 3 Mhz.')
-        elif bandwidth == LteBandwidth.BANDWIDTH_5MHz.value:
-            if not 0 <= rb <= 25:
-                raise ValueError('RB should be between 0 to 25 inclusive'
-                                 ' for 5 Mhz.')
-        elif bandwidth == LteBandwidth.BANDWIDTH_10MHz.value:
-            if not 0 <= rb <= 50:
-                raise ValueError('RB should be between 0 to 50 inclusive'
-                                 ' for 10 Mhz.')
-        elif bandwidth == LteBandwidth.BANDWIDTH_15MHz.value:
-            if not 0 <= rb <= 75:
-                raise ValueError('RB should be between 0 to 75 inclusive'
-                                 ' for 15 Mhz.')
-        elif bandwidth == LteBandwidth.BANDWIDTH_20MHz.value:
-            if not 0 <= rb <= 100:
-                raise ValueError('RB should be between 0 to 100 inclusive'
-                                 ' for 20 Mhz.')
-
-    @property
-    def rb_position_dl(self):
-        """Gets the position of the allocated down link resource blocks within
-        the channel band-width.
-        """
-        cmd = 'CONFigure:LTE:SIGN:CONNection:{}:RMC:RBPosition:DL?'.format(
-            self._bts)
-        return self._cmw.send_and_recv(cmd)
-
-    @rb_position_dl.setter
-    def rb_position_dl(self, rbpos):
-        """Selects the position of the allocated down link resource blocks
-        within the channel band-width
-
-        Args:
-            rbpos: position of resource blocks.
-        """
-        if not isinstance(rbpos, RbPosition):
-            raise ValueError('rbpos should be the instance of RbPosition.')
-
-        cmd = 'CONFigure:LTE:SIGN:CONNection:{}:RMC:RBPosition:DL {}'.format(
-            self._bts, rbpos.value)
-        self._cmw.send_and_recv(cmd)
-
-    @property
-    def rb_position_ul(self):
-        """Gets the position of the allocated up link resource blocks within
-        the channel band-width.
-        """
-        cmd = 'CONFigure:LTE:SIGN:CONNection:{}:RMC:RBPosition:UL?'.format(
-            self._bts)
-        return self._cmw.send_and_recv(cmd)
-
-    @rb_position_ul.setter
-    def rb_position_ul(self, rbpos):
-        """Selects the position of the allocated up link resource blocks
-        within the channel band-width.
-
-        Args:
-            rbpos: position of resource blocks.
-        """
-        if not isinstance(rbpos, RbPosition):
-            raise ValueError('rbpos should be the instance of RbPosition.')
-
-        cmd = 'CONFigure:LTE:SIGN:CONNection:{}:RMC:RBPosition:UL {}'.format(
-            self._bts, rbpos.value)
-        self._cmw.send_and_recv(cmd)
-
-    @property
-    def dci_format(self):
-        """Gets the downlink control information (DCI) format."""
-        cmd = 'CONFigure:LTE:SIGN:CONNection:{}:DCIFormat?'.format(self._bts)
-        return self._cmw.send_and_recv(cmd)
-
-    @dci_format.setter
-    def dci_format(self, dci_format):
-        """Selects the downlink control information (DCI) format.
-
-        Args:
-            dci_format: supported dci.
-        """
-        if not isinstance(dci_format, DciFormat):
-            raise ValueError('dci_format should be the instance of DciFormat.')
-
-        cmd = 'CONFigure:LTE:SIGN:CONNection:{}:DCIFormat {}'.format(
-            self._bts, dci_format)
-        self._cmw.send_and_recv(cmd)
-
-    @property
-    def dl_antenna(self):
-        """Gets dl antenna count of cell."""
-        cmd = 'CONFigure:LTE:SIGN:CONNection:{}:NENBantennas?'.format(self._bts)
-        return self._cmw.send_and_recv(cmd)
-
-    @dl_antenna.setter
-    def dl_antenna(self, num_antenna):
-        """Sets the dl antenna count of cell.
-
-        Args:
-            num_antenna: Count of number of dl antennas to use.
-        """
-        if not isinstance(num_antenna, MimoModes):
-            raise ValueError('num_antenna should be an instance of MimoModes.')
-        cmd = 'CONFigure:LTE:SIGN:CONNection:{}:NENBantennas {}'.format(
-            self._bts, num_antenna)
-        self._cmw.send_and_recv(cmd)
-
-    @property
-    def reduced_pdcch(self):
-        """Gets the reduction of PDCCH resources state."""
-        cmd = 'CONFigure:LTE:SIGN:CONNection:{}:PDCCh:RPDCch?'.format(
-            self._bts)
-        return self._cmw.send_and_recv(cmd)
-
-    @reduced_pdcch.setter
-    def reduced_pdcch(self, state):
-        """Sets the reduction of PDCCH resources state.
-
-        Args:
-            state: ON/OFF.
-        """
-        cmd = 'CONFigure:LTE:SIGN:CONNection:{}:PDCCh:RPDCch {}'.format(
-            self._bts, state.value)
-        self._cmw.send_and_recv(cmd)
-
-    def tpc_power_control(self, set_type):
-        """Set and execute the Up Link Power Control via TPC.
-
-        Args:
-            set_type: Type of tpc power control.
-        """
-
-        if not isinstance(set_type, TpcPowerControl):
-            raise ValueError('set_type should be the instance of '
-                             'TpCPowerControl.')
-        cmd = 'CONFigure:LTE:SIGN:UL:{}:PUSCh:TPC:SET {}'.format(
-            self._bts, set_type.value)
-        self._cmw.send_and_recv(cmd)
-        cmd = 'CONFigure:LTE:SIGN:UL:{}:PUSCh:TPC:PEXecute'.format(self._bts)
-        self._cmw.send_and_recv(cmd)
-
-    @property
-    def tpc_closed_loop_target_power(self):
-        """Gets the target powers for power control with the TPC setup."""
-        cmd = 'CONFigure:LTE:SIGN:UL:{}:PUSCh:TPC:CLTPower?'.format(self._bts)
-        return self._cmw.send_and_recv(cmd)
-
-    @tpc_closed_loop_target_power.setter
-    def tpc_closed_loop_target_power(self, cltpower):
-        """Sets the target powers for power control with the TPC setup.
-
-        Args:
-            tpower: Target power.
-        """
-        cmd = 'CONFigure:LTE:SIGN:UL:{}:PUSCh:TPC:CLTPower {}'.format(
-            self._bts, cltpower)
-        self._cmw.send_and_recv(cmd)
-
-    @property
-    def drx_connected_mode(self):
-        """ Gets the Connected DRX LTE cell parameter
-
-        Args:
-            None
-
-        Returns:
-            DRX connected mode (OFF, AUTO, MANUAL)
-        """
-        raise NotImplementedError()
-
-    @drx_connected_mode.setter
-    def drx_connected_mode(self, mode):
-        """  Sets the Connected DRX LTE cell parameter
-
-        Args:
-            mode: DRX Connected mode
-
-        Returns:
-            None
-        """
-        raise NotImplementedError()
-
-    @property
-    def drx_on_duration_timer(self):
-        """ Gets the amount of PDCCH subframes to wait for data after
-            waking up from a DRX cycle
-
-        Args:
-            None
-
-        Returns:
-            DRX mode duration timer
-        """
-        raise NotImplementedError()
-
-    @drx_on_duration_timer.setter
-    def drx_on_duration_timer(self, time):
-        """ Sets the amount of PDCCH subframes to wait for data after
-            waking up from a DRX cycle
-
-        Args:
-            timer: Length of interval to wait for user data to be transmitted
-
-        Returns:
-            None
-        """
-        raise NotImplementedError()
-
-    @property
-    def drx_inactivity_timer(self):
-        """ Gets the number of PDCCH subframes to wait before entering DRX mode
-
-        Args:
-            None
-
-        Returns:
-            DRX mode inactivity timer
-        """
-        raise NotImplementedError()
-
-    @drx_inactivity_timer.setter
-    def drx_inactivity_timer(self, time):
-        """ Sets the number of PDCCH subframes to wait before entering DRX mode
-
-        Args:
-            timer: Length of the interval to wait
-
-        Returns:
-            None
-        """
-        raise NotImplementedError()
-
-    @property
-    def drx_retransmission_timer(self):
-        """ Gets the number of consecutive PDCCH subframes to wait
-        for retransmission
-
-        Args:
-            None
-
-        Returns:
-            Number of PDCCH subframes to wait for retransmission
-        """
-        raise NotImplementedError()
-
-    @drx_retransmission_timer.setter
-    def drx_retransmission_timer(self, time):
-        """ Sets the number of consecutive PDCCH subframes to wait
-        for retransmission
-
-        Args:
-            time: Number of PDCCH subframes to wait
-            for retransmission
-
-        Returns:
-            None
-        """
-        raise NotImplementedError()
-
-    @property
-    def drx_long_cycle(self):
-        """ Gets the amount of subframes representing a DRX long cycle
-
-        Args:
-            None
-
-        Returns:
-            The amount of subframes representing one long DRX cycle.
-            One cycle consists of DRX sleep + DRX on duration
-        """
-        raise NotImplementedError()
-
-    @drx_long_cycle.setter
-    def drx_long_cycle(self, time):
-        """ Sets the amount of subframes representing a DRX long cycle
-
-        Args:
-            long_cycle: The amount of subframes representing one long DRX cycle.
-                One cycle consists of DRX sleep + DRX on duration
-
-        Returns:
-            None
-        """
-        raise NotImplementedError()
-
-    @property
-    def drx_long_cycle_offset(self):
-        """ Gets the offset used to determine long cycle starting
-        subframe
-
-        Args:
-            None
-
-        Returns:
-            Long cycle offset
-        """
-        raise NotImplementedError()
-
-    @drx_long_cycle_offset.setter
-    def drx_long_cycle_offset(self, offset):
-        """ Sets the offset used to determine long cycle starting
-        subframe
-
-        Args:
-            offset: Number in range 0...(long cycle - 1)
-        """
-        raise NotImplementedError()
-
-
-
-class LteMeasurement(object):
-
-    def __init__(self, cmw):
-        self._cmw = cmw
-
-    def intitilize_measurement(self):
-        """Initialize measurement modules."""
-        self._cmw.send_and_recv('INIT:LTE:MEAS:MEValuation')
-
-    @property
-    def measurement_repetition(self):
-        """Returns the measurement repetition mode that has been set."""
-        return self._cmw.send_and_recv(
-            'CONFigure:LTE:MEAS:MEValuation:REPetition?')
-
-    @measurement_repetition.setter
-    def measurement_repetition(self, mode):
-        """Sets the mode for measuring power levels.
-
-        Args:
-            mode: Single shot/continuous.
-        """
-        if not isinstance(mode, RepetitionMode):
-            raise ValueError('mode should be the instance of Repetition Mode')
-
-        cmd = 'CONFigure:LTE:MEAS:MEValuation:REPetition {}'.format(mode.value)
-        self._cmw.send_and_recv(cmd)
-
-    @property
-    def query_measurement_state(self):
-        """Returns the states and sub states of measurement."""
-        return self._cmw.send_and_recv('FETCh:LTE:MEAS:MEValuation:STATe:ALL?')
-
-    @property
-    def measure_tx_power(self):
-        """Return the current Tx power measurement."""
-        return self._cmw.send_and_recv(
-            'FETCh:LTE:MEAS:MEValuation:PMONitor:AVERage?')
-
-    def stop_measurement(self):
-        """Stops the on-going measurement.
-        This function call does not free up resources allocated for
-        measurement. Instead it moves from RUN to RDY state.
-        """
-        self._cmw.send_and_recv('STOP:LTE:MEAS:MEValuation')
-
-    def abort_measurement(self):
-        """Aborts the measurement abruptly.
-        This function call will free up the resources allocated for
-        measurement and all the results will be wiped off.
-        """
-        self._cmw.send_and_recv('ABORt:LTE:MEAS:MEValuation')
-
-
-class CmwError(Exception):
-    """Class to raise exceptions related to cmw."""
diff --git a/src/antlion/controllers/rohdeschwarz_lib/cmw500_cellular_simulator.py b/src/antlion/controllers/rohdeschwarz_lib/cmw500_cellular_simulator.py
deleted file mode 100644
index a65042d..0000000
--- a/src/antlion/controllers/rohdeschwarz_lib/cmw500_cellular_simulator.py
+++ /dev/null
@@ -1,579 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import time
-
-from antlion.controllers.rohdeschwarz_lib import cmw500
-from antlion.controllers import cellular_simulator as cc
-from antlion.controllers.cellular_lib import LteSimulation
-
-CMW_TM_MAPPING = {
-    LteSimulation.TransmissionMode.TM1: cmw500.TransmissionModes.TM1,
-    LteSimulation.TransmissionMode.TM2: cmw500.TransmissionModes.TM2,
-    LteSimulation.TransmissionMode.TM3: cmw500.TransmissionModes.TM3,
-    LteSimulation.TransmissionMode.TM4: cmw500.TransmissionModes.TM4,
-    LteSimulation.TransmissionMode.TM7: cmw500.TransmissionModes.TM7,
-    LteSimulation.TransmissionMode.TM8: cmw500.TransmissionModes.TM8,
-    LteSimulation.TransmissionMode.TM9: cmw500.TransmissionModes.TM9
-}
-
-CMW_SCH_MAPPING = {
-    LteSimulation.SchedulingMode.STATIC: cmw500.SchedulingMode.USERDEFINEDCH
-}
-
-CMW_MIMO_MAPPING = {
-    LteSimulation.MimoMode.MIMO_1x1: cmw500.MimoModes.MIMO1x1,
-    LteSimulation.MimoMode.MIMO_2x2: cmw500.MimoModes.MIMO2x2,
-    LteSimulation.MimoMode.MIMO_4x4: cmw500.MimoModes.MIMO4x4
-}
-
-# get mcs vs tbsi map with 256-qam disabled(downlink)
-get_mcs_tbsi_map_dl = {
-    cmw500.ModulationType.QPSK: {
-        0: 0,
-        1: 1,
-        2: 2,
-        3: 3,
-        4: 4,
-        5: 5,
-        6: 6,
-        7: 7,
-        8: 8,
-        9: 9
-    },
-    cmw500.ModulationType.Q16: {
-        10: 9,
-        11: 10,
-        12: 11,
-        13: 12,
-        14: 13,
-        15: 14,
-        16: 15
-    },
-    cmw500.ModulationType.Q64: {
-        17: 15,
-        18: 16,
-        19: 17,
-        20: 18,
-        21: 19,
-        22: 20,
-        23: 21,
-        24: 22,
-        25: 23,
-        26: 24,
-        27: 25,
-        28: 26
-    }
-}
-
-# get mcs vs tbsi map with 256-qam enabled(downlink)
-get_mcs_tbsi_map_for_256qam_dl = {
-    cmw500.ModulationType.QPSK: {
-        0: 0,
-        1: 2,
-        2: 4,
-        3: 6,
-        4: 8,
-    },
-    cmw500.ModulationType.Q16: {
-        5: 10,
-        6: 11,
-        7: 12,
-        8: 13,
-        9: 14,
-        10: 15
-    },
-    cmw500.ModulationType.Q64: {
-        11: 16,
-        12: 17,
-        13: 18,
-        14: 19,
-        15: 20,
-        16: 21,
-        17: 22,
-        18: 23,
-        19: 24
-    },
-    cmw500.ModulationType.Q256: {
-        20: 25,
-        21: 27,
-        22: 28,
-        23: 29,
-        24: 30,
-        25: 31,
-        26: 32,
-        27: 33
-    }
-}
-
-# get mcs vs tbsi map (uplink)
-get_mcs_tbsi_map_ul = {
-    cmw500.ModulationType.QPSK: {
-        0: 0,
-        1: 1,
-        2: 2,
-        3: 3,
-        4: 4,
-        5: 5,
-        6: 6,
-        7: 7,
-        8: 8,
-        9: 9
-    },
-    cmw500.ModulationType.Q16: {
-        10: 10,
-        11: 10,
-        12: 11,
-        13: 12,
-        14: 13,
-        15: 14,
-        16: 15,
-        17: 16,
-        18: 17,
-        19: 18,
-        20: 19,
-        21: 19,
-        22: 20,
-        23: 21,
-        24: 22,
-        25: 23,
-        26: 24,
-        27: 25,
-        28: 26
-    }
-}
-
-
-class CMW500CellularSimulator(cc.AbstractCellularSimulator):
-    """ A cellular simulator for telephony simulations based on the CMW 500
-    controller. """
-
-    # The maximum number of carriers that this simulator can support for LTE
-    LTE_MAX_CARRIERS = 1
-
-    def __init__(self, ip_address, port):
-        """ Initializes the cellular simulator.
-
-        Args:
-            ip_address: the ip address of the CMW500
-            port: the port number for the CMW500 controller
-        """
-        super().__init__()
-
-        try:
-            self.cmw = cmw500.Cmw500(ip_address, port)
-        except cmw500.CmwError:
-            raise cc.CellularSimulatorError('Could not connect to CMW500.')
-
-        self.bts = None
-        self.dl_modulation = None
-        self.ul_modulation = None
-
-    def destroy(self):
-        """ Sends finalization commands to the cellular equipment and closes
-        the connection. """
-        self.cmw.disconnect()
-
-    def setup_lte_scenario(self):
-        """ Configures the equipment for an LTE simulation. """
-        self.cmw.connection_type = cmw500.ConnectionType.DAU
-        self.bts = [self.cmw.get_base_station()]
-        self.cmw.switch_lte_signalling(cmw500.LteState.LTE_ON)
-
-    def set_band_combination(self, bands):
-        """ Prepares the test equipment for the indicated band combination.
-
-        Args:
-            bands: a list of bands represented as ints or strings
-        """
-        self.num_carriers = len(bands)
-
-    def set_lte_rrc_state_change_timer(self, enabled, time=10):
-        """ Configures the LTE RRC state change timer.
-
-        Args:
-            enabled: a boolean indicating if the timer should be on or off.
-            time: time in seconds for the timer to expire
-        """
-        if enabled:
-            self.cmw.rrc_connection = cmw500.RrcState.RRC_OFF
-            self.cmw.rrc_connection_timer = time
-        else:
-            self.cmw.rrc_connection = cmw500.RrcState.RRC_ON
-
-    def set_band(self, bts_index, band):
-        """ Sets the band for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            band: the new band
-        """
-        bts = self.bts[bts_index]
-        bts.duplex_mode = self.get_duplex_mode(band)
-        band = 'OB' + band
-        bts.band = band
-        self.log.debug('Band set to {}'.format(band))
-
-    def get_duplex_mode(self, band):
-        """ Determines if the band uses FDD or TDD duplex mode
-
-        Args:
-            band: a band number
-
-        Returns:
-            an variable of class DuplexMode indicating if band is FDD or TDD
-        """
-        if 33 <= int(band) <= 46:
-            return cmw500.DuplexMode.TDD
-        else:
-            return cmw500.DuplexMode.FDD
-
-    def set_input_power(self, bts_index, input_power):
-        """ Sets the input power for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            input_power: the new input power
-        """
-        bts = self.bts[bts_index]
-        if input_power > 23:
-            self.log.warning('Open loop supports-50dBm to 23 dBm. '
-                             'Setting it to max power 23 dBm')
-            input_power = 23
-        bts.uplink_power_control = input_power
-        bts.tpc_power_control = cmw500.TpcPowerControl.CLOSED_LOOP
-        bts.tpc_closed_loop_target_power = input_power
-
-    def set_output_power(self, bts_index, output_power):
-        """ Sets the output power for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            output_power: the new output power
-        """
-        bts = self.bts[bts_index]
-        bts.downlink_power_level = output_power
-
-    def set_tdd_config(self, bts_index, tdd_config):
-        """ Sets the tdd configuration number for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            tdd_config: the new tdd configuration number
-        """
-        self.bts[bts_index].uldl_configuration = tdd_config
-
-    def set_ssf_config(self, bts_index, ssf_config):
-        """ Sets the Special Sub-Frame config number for the indicated
-        base station.
-
-        Args:
-            bts_index: the base station number
-            ssf_config: the new ssf config number
-        """
-        if not 0 <= ssf_config <= 9:
-            raise ValueError('The Special Sub-Frame configuration has to be a '
-                             'number between 0 and 9.')
-
-        self.bts[bts_index].tdd_special_subframe = ssf_config
-
-    def set_bandwidth(self, bts_index, bandwidth):
-        """ Sets the bandwidth for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            bandwidth: the new bandwidth
-        """
-        bts = self.bts[bts_index]
-
-        if bandwidth == 20:
-            bts.bandwidth = cmw500.LteBandwidth.BANDWIDTH_20MHz
-        elif bandwidth == 15:
-            bts.bandwidth = cmw500.LteBandwidth.BANDWIDTH_15MHz
-        elif bandwidth == 10:
-            bts.bandwidth = cmw500.LteBandwidth.BANDWIDTH_10MHz
-        elif bandwidth == 5:
-            bts.bandwidth = cmw500.LteBandwidth.BANDWIDTH_5MHz
-        elif bandwidth == 3:
-            bts.bandwidth = cmw500.LteBandwidth.BANDWIDTH_3MHz
-        elif bandwidth == 1.4:
-            bts.bandwidth = cmw500.LteBandwidth.BANDWIDTH_1MHz
-        else:
-            msg = 'Bandwidth {} MHz is not valid for LTE'.format(bandwidth)
-            raise ValueError(msg)
-
-    def set_downlink_channel_number(self, bts_index, channel_number):
-        """ Sets the downlink channel number for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            channel_number: the new channel number
-        """
-        bts = self.bts[bts_index]
-        bts.dl_channel = channel_number
-        self.log.debug('Downlink Channel set to {}'.format(bts.dl_channel))
-
-    def set_mimo_mode(self, bts_index, mimo_mode):
-        """ Sets the mimo mode for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            mimo_mode: the new mimo mode
-        """
-        bts = self.bts[bts_index]
-        mimo_mode = CMW_MIMO_MAPPING[mimo_mode]
-        if mimo_mode == cmw500.MimoModes.MIMO1x1:
-            self.cmw.configure_mimo_settings(cmw500.MimoScenario.SCEN1x1)
-            bts.dl_antenna = cmw500.MimoModes.MIMO1x1
-
-        elif mimo_mode == cmw500.MimoModes.MIMO2x2:
-            self.cmw.configure_mimo_settings(cmw500.MimoScenario.SCEN2x2)
-            bts.dl_antenna = cmw500.MimoModes.MIMO2x2
-
-        elif mimo_mode == cmw500.MimoModes.MIMO4x4:
-            self.cmw.configure_mimo_settings(cmw500.MimoScenario.SCEN4x4)
-            bts.dl_antenna = cmw500.MimoModes.MIMO4x4
-        else:
-            raise RuntimeError('The requested MIMO mode is not supported.')
-
-    def set_transmission_mode(self, bts_index, tmode):
-        """ Sets the transmission mode for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            tmode: the new transmission mode
-        """
-        bts = self.bts[bts_index]
-
-        tmode = CMW_TM_MAPPING[tmode]
-
-        if (tmode in [
-                cmw500.TransmissionModes.TM1, cmw500.TransmissionModes.TM7
-        ] and bts.dl_antenna == cmw500.MimoModes.MIMO1x1.value):
-            bts.transmode = tmode
-        elif (tmode.value in cmw500.TransmissionModes.__members__
-              and bts.dl_antenna == cmw500.MimoModes.MIMO2x2.value):
-            bts.transmode = tmode
-        elif (tmode in [
-                cmw500.TransmissionModes.TM2, cmw500.TransmissionModes.TM3,
-                cmw500.TransmissionModes.TM4, cmw500.TransmissionModes.TM9
-        ] and bts.dl_antenna == cmw500.MimoModes.MIMO4x4.value):
-            bts.transmode = tmode
-
-        else:
-            raise ValueError('Transmission modes should support the current '
-                             'mimo mode')
-
-    def set_scheduling_mode(self,
-                            bts_index,
-                            scheduling,
-                            mcs_dl=None,
-                            mcs_ul=None,
-                            nrb_dl=None,
-                            nrb_ul=None):
-        """ Sets the scheduling mode for the indicated base station.
-
-        Args:
-            bts_index: the base station number.
-            scheduling: the new scheduling mode.
-            mcs_dl: Downlink MCS.
-            mcs_ul: Uplink MCS.
-            nrb_dl: Number of RBs for downlink.
-            nrb_ul: Number of RBs for uplink.
-        """
-        bts = self.bts[bts_index]
-        bts.reduced_pdcch = cmw500.ReducedPdcch.ON
-
-        scheduling = CMW_SCH_MAPPING[scheduling]
-        bts.scheduling_mode = scheduling
-
-        if not (self.ul_modulation and self.dl_modulation):
-            raise ValueError('Modulation should be set prior to scheduling '
-                             'call')
-
-        if scheduling == cmw500.SchedulingMode.RMC:
-
-            if not nrb_ul and nrb_dl:
-                raise ValueError('nrb_ul and nrb dl should not be none')
-
-            bts.rb_configuration_ul = (nrb_ul, self.ul_modulation, 'KEEP')
-            self.log.info('ul rb configurations set to {}'.format(
-                bts.rb_configuration_ul))
-
-            time.sleep(1)
-
-            self.log.debug('Setting rb configurations for down link')
-            bts.rb_configuration_dl = (nrb_dl, self.dl_modulation, 'KEEP')
-            self.log.info('dl rb configurations set to {}'.format(
-                bts.rb_configuration_ul))
-
-        elif scheduling == cmw500.SchedulingMode.USERDEFINEDCH:
-
-            if not all([nrb_ul, nrb_dl, mcs_dl, mcs_ul]):
-                raise ValueError('All parameters are mandatory.')
-
-            tbs = get_mcs_tbsi_map_ul[self.ul_modulation][mcs_ul]
-
-            bts.rb_configuration_ul = (nrb_ul, 0, self.ul_modulation, tbs)
-            self.log.info('ul rb configurations set to {}'.format(
-                bts.rb_configuration_ul))
-
-            time.sleep(1)
-
-            if self.dl_256_qam_enabled:
-                tbs = get_mcs_tbsi_map_for_256qam_dl[
-                    self.dl_modulation][mcs_dl]
-            else:
-                tbs = get_mcs_tbsi_map_dl[self.dl_modulation][mcs_dl]
-
-            bts.rb_configuration_dl = (nrb_dl, 0, self.dl_modulation, tbs)
-            self.log.info('dl rb configurations set to {}'.format(
-                bts.rb_configuration_dl))
-
-    def set_dl_256_qam_enabled(self, bts_index, enabled):
-        """ Determines what MCS table should be used for the downlink.
-        This only saves the setting that will be used when configuring MCS.
-
-        Args:
-            bts_index: the base station number
-            enabled: whether 256 QAM should be used
-        """
-        self.log.info('Set 256 QAM DL MCS enabled: ' + str(enabled))
-        self.dl_modulation = cmw500.ModulationType.Q256 if enabled \
-            else cmw500.ModulationType.Q64
-        self.dl_256_qam_enabled = enabled
-
-    def set_ul_64_qam_enabled(self, bts_index, enabled):
-        """ Determines what MCS table should be used for the uplink.
-        This only saves the setting that will be used when configuring MCS.
-
-        Args:
-            bts_index: the base station number
-            enabled: whether 64 QAM should be used
-        """
-        self.log.info('Set 64 QAM UL MCS enabled: ' + str(enabled))
-        self.ul_modulation = cmw500.ModulationType.Q64 if enabled \
-            else cmw500.ModulationType.Q16
-        self.ul_64_qam_enabled = enabled
-
-    def set_mac_padding(self, bts_index, mac_padding):
-        """ Enables or disables MAC padding in the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            mac_padding: the new MAC padding setting
-        """
-        # TODO (b/143918664): CMW500 doesn't have an equivalent setting.
-
-    def set_cfi(self, bts_index, cfi):
-        """ Sets the Channel Format Indicator for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            cfi: the new CFI setting
-        """
-        # TODO (b/143497738): implement.
-        self.log.error('Setting CFI is not yet implemented in the CMW500 '
-                       'controller.')
-
-    def set_paging_cycle(self, bts_index, cycle_duration):
-        """ Sets the paging cycle duration for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            cycle_duration: the new paging cycle duration in milliseconds
-        """
-        # TODO (b/146068532): implement.
-        self.log.error('Setting the paging cycle duration is not yet '
-                       'implemented in the CMW500 controller.')
-
-    def set_phich_resource(self, bts_index, phich):
-        """ Sets the PHICH Resource setting for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            phich: the new PHICH resource setting
-        """
-        self.log.error('Configuring the PHICH resource setting is not yet '
-                       'implemented in the CMW500 controller.')
-
-    def lte_attach_secondary_carriers(self, ue_capability_enquiry):
-        """ Activates the secondary carriers for CA. Requires the DUT to be
-        attached to the primary carrier first.
-
-        Args:
-            ue_capability_enquiry: UE capability enquiry message to be sent to
-        the UE before starting carrier aggregation.
-        """
-        raise NotImplementedError()
-
-    def wait_until_attached(self, timeout=120):
-        """ Waits until the DUT is attached to the primary carrier.
-
-        Args:
-            timeout: after this amount of time the method will raise a
-                CellularSimulatorError exception. Default is 120 seconds.
-        """
-        try:
-            self.cmw.wait_for_attached_state(timeout=timeout)
-        except cmw500.CmwError:
-            raise cc.CellularSimulatorError('The phone was not in '
-                                            'attached state before '
-                                            'the timeout period ended.')
-
-    def wait_until_communication_state(self, timeout=120):
-        """ Waits until the DUT is in Communication state.
-
-        Args:
-            timeout: after this amount of time the method will raise a
-                CellularSimulatorError exception. Default is 120 seconds.
-        """
-        try:
-            self.cmw.wait_for_rrc_state(cmw500.LTE_CONN_RESP, timeout=timeout)
-        except cmw500.CmwError:
-            raise cc.CellularSimulatorError('The phone was not in '
-                                            'Communication state before '
-                                            'the timeout period ended.')
-
-    def wait_until_idle_state(self, timeout=120):
-        """ Waits until the DUT is in Idle state.
-
-        Args:
-            timeout: after this amount of time the method will raise a
-                CellularSimulatorError exception. Default is 120 seconds.
-        """
-        try:
-            self.cmw.wait_for_rrc_state(cmw500.LTE_IDLE_RESP, timeout=timeout)
-        except cmw500.CmwError:
-            raise cc.CellularSimulatorError('The phone was not in '
-                                            'Idle state before '
-                                            'the timeout period ended.')
-
-    def detach(self):
-        """ Turns off all the base stations so the DUT loose connection."""
-        self.cmw.detach()
-
-    def stop(self):
-        """ Stops current simulation. After calling this method, the simulator
-        will need to be set up again. """
-        raise NotImplementedError()
-
-    def start_data_traffic(self):
-        """ Starts transmitting data from the instrument to the DUT. """
-        raise NotImplementedError()
-
-    def stop_data_traffic(self):
-        """ Stops transmitting data from the instrument to the DUT. """
-        raise NotImplementedError()
diff --git a/src/antlion/controllers/rohdeschwarz_lib/cmx500.py b/src/antlion/controllers/rohdeschwarz_lib/cmx500.py
deleted file mode 100644
index ebdc9f9..0000000
--- a/src/antlion/controllers/rohdeschwarz_lib/cmx500.py
+++ /dev/null
@@ -1,1067 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-import time
-import sys
-
-from enum import Enum
-from os import path
-from antlion.controllers import abstract_inst
-
-DEFAULT_XLAPI_PATH = '/home/mobileharness/Rohde-Schwarz/XLAPI/latest/venv/lib/python3.7/site-packages'
-DEFAULT_LTE_STATE_CHANGE_TIMER = 10
-DEFAULT_CELL_SWITCH_ON_TIMER = 60
-DEFAULT_ENDC_TIMER = 300
-
-logger = logging.getLogger('Xlapi_cmx500')
-
-LTE_CELL_PROPERTIES = [
-    'band',
-    'bandwidth',
-    'dl_earfcn',
-    'ul_earfcn',
-    'total_dl_power',
-    'p_b',
-    'dl_epre',
-    'ref_signal_power',
-    'm',
-    'beamforming_antenna_ports',
-    'p0_nominal_pusch',
-]
-
-LTE_MHZ_UPPER_BOUND_TO_RB = [
-    (1.5, 6),
-    (4.0, 15),
-    (7.5, 25),
-    (12.5, 50),
-    (17.5, 75),
-]
-
-class DciFormat(Enum):
-    """Support DCI Formats for MIMOs."""
-    DCI_FORMAT_0 = 1
-    DCI_FORMAT_1 = 2
-    DCI_FORMAT_1A = 3
-    DCI_FORMAT_1B = 4
-    DCI_FORMAT_1C = 5
-    DCI_FORMAT_2 = 6
-    DCI_FORMAT_2A = 7
-    DCI_FORMAT_2B = 8
-    DCI_FORMAT_2C = 9
-    DCI_FORMAT_2D = 10
-
-
-class DuplexMode(Enum):
-    """Duplex Modes."""
-    FDD = 'FDD'
-    TDD = 'TDD'
-    DL_ONLY = 'DL_ONLY'
-
-
-class LteBandwidth(Enum):
-    """Supported LTE bandwidths."""
-    BANDWIDTH_1MHz = 6 # MHZ_1 is RB_6
-    BANDWIDTH_3MHz = 15 # MHZ_3 is RB_15
-    BANDWIDTH_5MHz = 25 # MHZ_5 is RB_25
-    BANDWIDTH_10MHz = 50 # MHZ_10 is RB_50
-    BANDWIDTH_15MHz = 75 # MHZ_15 is RB_75
-    BANDWIDTH_20MHz = 100 # MHZ_20 is RB_100
-
-
-class LteState(Enum):
-    """LTE ON and OFF."""
-    LTE_ON = 'ON'
-    LTE_OFF = 'OFF'
-
-
-class MimoModes(Enum):
-    """MIMO Modes dl antennas."""
-    MIMO1x1 = 1
-    MIMO2x2 = 2
-    MIMO4x4 = 4
-
-
-class ModulationType(Enum):
-    """Supported Modulation Types."""
-    Q16 = 0
-    Q64 = 1
-    Q256 = 2
-
-
-class NasState(Enum):
-    """NAS state between callbox and dut."""
-    DEREGISTERED = 'OFF'
-    EMM_REGISTERED = 'EMM'
-    MM5G_REGISTERED = 'NR'
-
-
-class RrcState(Enum):
-    """States to enable/disable rrc."""
-    RRC_ON = 'ON'
-    RRC_OFF = 'OFF'
-
-
-class RrcConnectionState(Enum):
-    """RRC Connection states, describes possible DUT RRC connection states."""
-    IDLE = 1
-    IDLE_PAGING = 2
-    IDLE_CONNECTION_ESTABLISHMENT = 3
-    CONNECTED = 4
-    CONNECTED_CONNECTION_REESTABLISHMENT = 5
-    CONNECTED_SCG_FAILURE = 6
-    CONNECTED_HANDOVER = 7
-    CONNECTED_CONNECTION_RELEASE = 8
-
-
-class SchedulingMode(Enum):
-    """Supported scheduling modes."""
-    USERDEFINEDCH = 'UDCHannels'
-
-
-class TransmissionModes(Enum):
-    """Supported transmission modes."""
-    TM1 = 1
-    TM2 = 2
-    TM3 = 3
-    TM4 = 4
-    TM7 = 7
-    TM8 = 8
-    TM9 = 9
-
-
-# For mimo 1x1, also set_num_crs_antenna_ports to 1
-MIMO_MAX_LAYER_MAPPING = {
-    MimoModes.MIMO1x1: 2,
-    MimoModes.MIMO2x2: 2,
-    MimoModes.MIMO4x4: 4,
-}
-
-
-class Cmx500(abstract_inst.SocketInstrument):
-
-    def __init__(self, ip_addr, port, xlapi_path=DEFAULT_XLAPI_PATH):
-        """Init method to setup variables for the controller.
-
-        Args:
-              ip_addr: Controller's ip address.
-              port: Port.
-        """
-
-        # keeps the socket connection for debug purpose for now
-        super().__init__(ip_addr, port)
-        if not xlapi_path in sys.path:
-            sys.path.insert(0, xlapi_path)
-        self._initial_xlapi()
-        self._settings.system.set_instrument_address(ip_addr)
-        logger.info('The instrument address is {}'.format(
-                self._settings.system.get_instrument_address()))
-
-        self.bts = []
-
-        # Stops all active cells if there is any
-        self.disconnect()
-
-        # loads cell default settings from parameter file if there is one
-        default_setup_path = 'default_cell_setup.rsxp'
-        if path.exists(default_setup_path):
-            self._settings.session.set_test_param_files(default_setup_path)
-
-        self.dut = self._network.get_dut()
-        self.lte_cell = self._network.create_lte_cell('ltecell0')
-        self.nr_cell = self._network.create_nr_cell('nrcell0')
-        self._config_antenna_ports()
-        self.lte_rrc_state_change_timer = DEFAULT_LTE_STATE_CHANGE_TIMER
-        self.rrc_state_change_time_enable = False
-        self.cell_switch_on_timer = DEFAULT_CELL_SWITCH_ON_TIMER
-
-    # _config_antenna_ports for the special RF connection with cmw500 + cmx500.
-    def _config_antenna_ports(self):
-        from rs_mrt.testenvironment.signaling.sri.rat.common import CsiRsAntennaPorts
-        from rs_mrt.testenvironment.signaling.sri.rat.lte import CrsAntennaPorts
-
-        max_csi_rs_ports = CsiRsAntennaPorts.NUMBER_CSI_RS_ANTENNA_PORTS_FOUR
-        max_crs_ports = CrsAntennaPorts.NUMBER_CRS_ANTENNA_PORTS_FOUR
-
-        lte_cell_max_config = self.lte_cell.stub.GetMaximumConfiguration()
-        lte_cell_max_config.csi_rs_antenna_ports = max_csi_rs_ports
-        lte_cell_max_config.crs_antenna_ports = max_crs_ports
-        self.lte_cell.stub.SetMaximumConfiguration(lte_cell_max_config)
-
-        nr_cell_max_config = self.nr_cell.stub.GetMaximumConfiguration()
-        nr_cell_max_config.csi_rs_antenna_ports = max_csi_rs_ports
-        self.nr_cell.stub.SetMaximumConfiguration(nr_cell_max_config)
-
-    def _initial_xlapi(self):
-        import xlapi
-        import mrtype
-        from xlapi import network
-        from xlapi import settings
-
-        self._xlapi = xlapi
-        self._network = network
-        self._settings = settings
-
-    def configure_mimo_settings(self, mimo, bts_index=0):
-        """Sets the mimo scenario for the test.
-
-        Args:
-            mimo: mimo scenario to set.
-        """
-        self.bts[bts_index].set_mimo_mode(mimo)
-
-    @property
-    def connection_type(self):
-        """Gets the connection type applied in callbox."""
-        state = self.dut.state.rrc_connection_state
-        return RrcConnectionState(state.value)
-
-    def create_base_station(self, cell):
-        """Creates the base station object with cell and current object.
-
-        Args:
-            cell: the XLAPI cell.
-
-        Returns:
-            base station object.
-        Raise:
-            CmxError if the cell is neither LTE nor NR.
-        """
-        from xlapi.lte_cell import LteCell
-        from xlapi.nr_cell import NrCell
-        if isinstance(cell, LteCell):
-            return LteBaseStation(self, cell)
-        elif isinstance(cell, NrCell):
-            return NrBaseStation(self, cell)
-        else:
-            raise CmxError('The cell type is neither LTE nor NR')
-
-    def detach(self):
-        """Detach callbox and controller."""
-        for bts in self.bts:
-            bts.stop()
-
-    def disable_packet_switching(self):
-        """Disable packet switching in call box."""
-        raise NotImplementedError()
-
-    def disconnect(self):
-        """Disconnect controller from device and switch to local mode."""
-
-        # Stops all lte and nr_cell
-        for cell in self._network.get_all_lte_cells():
-            if cell.is_on():
-                cell.stop()
-
-        for cell in self._network.get_all_nr_cells():
-            if cell.is_on():
-                cell.stop()
-        self.bts.clear()
-        self._network.reset()
-
-    def enable_packet_switching(self):
-        """Enable packet switching in call box."""
-        raise NotImplementedError()
-
-    def get_base_station(self, bts_index=0):
-        """Gets the base station object based on bts num. By default
-        bts_index set to 0 (PCC).
-
-        Args:
-            bts_num: base station identifier
-
-        Returns:
-            base station object.
-        """
-        return self.bts[bts_index]
-
-    def get_network(self):
-        """ Gets the network object from cmx500 object."""
-        return self._network
-
-    def init_lte_measurement(self):
-        """Gets the class object for lte measurement which can be used to
-        initiate measurements.
-
-        Returns:
-            lte measurement object.
-        """
-        raise NotImplementedError()
-
-    def reset(self):
-        """System level reset."""
-
-        self.disconnect()
-
-    @property
-    def rrc_connection(self):
-        """Gets the RRC connection state."""
-        return self.dut.state.rrc.is_connected
-
-    def set_timer(self, timeout):
-        """Sets timer for the Cmx500 class."""
-        self.rrc_state_change_time_enable = True
-        self.lte_rrc_state_change_timer = timeout
-
-    def switch_lte_signalling(self, state):
-        """ Turns LTE signalling ON/OFF.
-
-        Args:
-            state: an instance of LteState indicating the state to which LTE
-                   signal has to be set.
-        """
-        if not isinstance(state, LteState):
-            raise ValueError('state should be the instance of LteState.')
-
-        if self.bts:
-            self.disconnect()
-        self.bts.append(LteBaseStation(self, self.lte_cell))
-        # Switch on the primary Lte cell for on state and switch all lte cells
-        # if the state is off state
-        if state.value == 'ON':
-            self.bts[0].start()
-            cell_status = self.bts[0].wait_cell_on(self.cell_switch_on_timer)
-            if cell_status:
-                logger.info('The LTE pcell status is on')
-            else:
-                raise CmxError('The LTE pcell cannot be switched on')
-        else:
-            for bts in self.bts:
-                if isinstance(bts, LteBaseStation):
-                    bts.stop()
-                logger.info(
-                    'The LTE cell status is {} after stop'.format(bts.is_on()))
-
-    def switch_on_nsa_signalling(self):
-        if self.bts:
-            self.disconnect()
-        logger.info('Switches on NSA signalling')
-        self.bts.append(LteBaseStation(self, self.lte_cell))
-        self.bts.append(NrBaseStation(self, self.nr_cell))
-        self.bts[0].start()
-        lte_cell_status = self.bts[0].wait_cell_on(self.cell_switch_on_timer)
-        if lte_cell_status:
-            logger.info('The LTE pcell status is on')
-        else:
-            raise CmxError('The LTE pcell cannot be switched on')
-
-        self.bts[1].start()
-        nr_cell_status = self.bts[1].wait_cell_on(self.cell_switch_on_timer)
-        if nr_cell_status:
-            logger.info('The NR cell status is on')
-        else:
-            raise CmxError('The NR cell cannot be switched on')
-
-    def update_lte_cell_config(self, config):
-        """Updates lte cell settings with config."""
-        set_counts = 0
-        for property in LTE_CELL_PROPERTIES:
-            if property in config:
-                setter_name = 'set_' + property
-                setter = getattr(self.lte_cell, setter_name)
-                setter(config[property])
-                set_counts += 1
-        if set_counts < len(config):
-            logger.warning('Not all configs were set in update_cell_config')
-
-    @property
-    def use_carrier_specific(self):
-        """Gets current status of carrier specific duplex configuration."""
-        raise NotImplementedError()
-
-    @use_carrier_specific.setter
-    def use_carrier_specific(self, state):
-        """Sets the carrier specific duplex configuration.
-
-        Args:
-            state: ON/OFF UCS configuration.
-        """
-        raise NotImplementedError()
-
-    def wait_for_rrc_state(self, state, timeout=120):
-        """ Waits until a certain RRC state is set.
-
-        Args:
-            state: the RRC state that is being waited for.
-            timeout: timeout for phone to be in connected state.
-
-        Raises:
-            CmxError on time out.
-        """
-        is_idle = (state.value == 'OFF')
-        for idx in range(timeout):
-            time.sleep(1)
-            if self.dut.state.rrc.is_idle == is_idle:
-                logger.info('{} reached at {} s'.format(state.value, idx))
-                return True
-        error_message = 'Waiting for {} state timeout after {}'.format(
-                state.value, timeout)
-        logger.error(error_message)
-        raise CmxError(error_message)
-
-    def wait_until_attached(self, timeout=120):
-        """Waits until Lte attached.
-
-        Args:
-            timeout: timeout for phone to get attached.
-
-        Raises:
-            CmxError on time out.
-        """
-        try:
-            self.dut.signaling.wait_for_lte_attach(self.lte_cell, timeout)
-        except:
-            raise CmxError(
-                    'wait_until_attached timeout after {}'.format(timeout))
-
-
-class BaseStation(object):
-    """Class to interact with different the base stations."""
-
-    def __init__(self, cmx, cell):
-        """Init method to setup variables for base station.
-
-        Args:
-            cmx: Controller (Cmx500) object.
-            cell: The cell for the base station.
-        """
-
-        self._cell = cell
-        self._cmx = cmx
-        self._cc = cmx.dut.cc(cell)
-        self._network = cmx.get_network()
-
-    @property
-    def band(self):
-        """Gets the current band of cell.
-
-        Return:
-            the band number in int.
-        """
-        cell_band = self._cell.get_band()
-        return int(cell_band)
-
-    @property
-    def dl_power(self):
-        """Gets RSPRE level.
-
-        Return:
-            the power level in dbm.
-        """
-        return self._cell.get_total_dl_power().in_dBm()
-
-    @property
-    def duplex_mode(self):
-        """Gets current duplex of cell."""
-        band = self._cell.get_band()
-        if band.is_fdd():
-            return DuplexMode.FDD
-        if band.is_tdd():
-            return DuplexMode.TDD
-        if band.is_dl_only():
-            return DuplexMode.DL_ONLY
-
-    def is_on(self):
-        """Verifies if the cell is turned on.
-
-            Return:
-                boolean (if the cell is on).
-        """
-        return self._cell.is_on()
-
-    def set_band(self, band):
-        """Sets the Band of cell.
-
-        Args:
-            band: band of cell.
-        """
-        self._cell.set_band(band)
-        logger.info('The band is set to {} and is {} after setting'.format(
-                band, self.band))
-
-    def set_dl_mac_padding(self, state):
-        """Enables/Disables downlink padding at the mac layer.
-
-        Args:
-            state: a boolean
-        """
-        self._cc.set_dl_mac_padding(state)
-
-    def set_dl_power(self, pwlevel):
-        """Modifies RSPRE level.
-
-        Args:
-            pwlevel: power level in dBm.
-        """
-        self._cell.set_total_dl_power(pwlevel)
-
-    def set_ul_power(self, ul_power):
-        """Sets ul power
-
-        Args:
-            ul_power: the uplink power in dbm
-        """
-        self._cc.set_target_ul_power(ul_power)
-
-    def start(self):
-        """Starts the cell."""
-        self._cell.start()
-
-    def stop(self):
-        """Stops the cell."""
-        self._cell.stop()
-
-    def wait_cell_on(self, timeout):
-        """Waits the cell on.
-
-        Args:
-            timeout: the time for waiting the cell on.
-
-        Raises:
-            CmxError on time out.
-        """
-        waiting_time = 0
-        while waiting_time < timeout:
-            if self._cell.is_on():
-                return True
-            waiting_time += 1
-            time.sleep(1)
-        return self._cell.is_on()
-
-
-class LteBaseStation(BaseStation):
-    """ LTE base station."""
-
-    def __init__(self, cmx, cell):
-        """Init method to setup variables for the LTE base station.
-
-        Args:
-            cmx: Controller (Cmx500) object.
-            cell: The cell for the LTE base station.
-        """
-        from xlapi.lte_cell import LteCell
-        if not isinstance(cell, LteCell):
-            raise CmxError('The cell is not a LTE cell, LTE base station  fails'
-                           ' to create.')
-        super().__init__(cmx, cell)
-
-    def _config_scheduler(self, dl_mcs=None, dl_rb_alloc=None, dl_dci_ncce=None,
-        dl_dci_format=None, dl_tm=None, dl_num_layers=None, dl_mcs_table=None,
-        ul_mcs=None, ul_rb_alloc=None, ul_dci_ncce=None):
-
-        from rs_mrt.testenvironment.signaling.sri.rat.lte import DciFormat
-        from rs_mrt.testenvironment.signaling.sri.rat.lte import DlTransmissionMode
-        from rs_mrt.testenvironment.signaling.sri.rat.lte import MaxLayersMIMO
-        from rs_mrt.testenvironment.signaling.sri.rat.lte import McsTable
-        from rs_mrt.testenvironment.signaling.sri.rat.lte import PdcchFormat
-
-        log_list = []
-        if dl_mcs:
-            log_list.append('dl_mcs: {}'.format(dl_mcs))
-        if ul_mcs:
-            log_list.append('ul_mcs: {}'.format(ul_mcs))
-        if dl_rb_alloc:
-            log_list.append('dl_rb_alloc: {}'.format(dl_rb_alloc))
-        if ul_rb_alloc:
-            log_list.append('ul_rb_alloc: {}'.format(ul_rb_alloc))
-        if dl_dci_ncce:
-            dl_dci_ncce = PdcchFormat(dl_dci_ncce)
-            log_list.append('dl_dci_ncce: {}'.format(dl_dci_ncce))
-        if ul_dci_ncce:
-            ul_dci_ncce = PdcchFormat(ul_dci_ncce)
-            log_list.append('ul_dci_ncce: {}'.format(ul_dci_ncce))
-        if dl_dci_format:
-            dl_dci_format = DciFormat(dl_dci_format)
-            log_list.append('dl_dci_format: {}'.format(dl_dci_format))
-        if dl_tm:
-            dl_tm = DlTransmissionMode(dl_tm.value)
-            log_list.append('dl_tm: {}'.format(dl_tm))
-        if dl_num_layers:
-            dl_num_layers = MaxLayersMIMO(dl_num_layers)
-            log_list.append('dl_num_layers: {}'.format(dl_num_layers))
-        if dl_mcs_table:
-            dl_mcs_table = McsTable(dl_mcs_table)
-            log_list.append('dl_mcs_table: {}'.format(dl_mcs_table))
-
-        is_on = self._cell.is_on()
-        num_crs_antenna_ports = self._cell.get_num_crs_antenna_ports()
-
-        # Sets num of crs antenna ports to 4 for configuring
-        if is_on:
-            self._cell.stop()
-            time.sleep(1)
-        self._cell.set_num_crs_antenna_ports(4)
-        scheduler = self._cmx.dut.get_scheduler(self._cell)
-        logger.info('configure scheduler for {}'.format(','.join(log_list)))
-        scheduler.configure_scheduler(
-                dl_mcs=dl_mcs, dl_rb_alloc=dl_rb_alloc, dl_dci_ncce=dl_dci_ncce,
-                dl_dci_format=dl_dci_format, dl_tm=dl_tm,
-                dl_num_layers=dl_num_layers, dl_mcs_table=dl_mcs_table,
-                ul_mcs=ul_mcs, ul_rb_alloc=ul_rb_alloc, ul_dci_ncce=ul_dci_ncce)
-        logger.info('Configure scheduler succeeds')
-
-        # Sets num of crs antenna ports back to previous value
-        self._cell.set_num_crs_antenna_ports(num_crs_antenna_ports)
-        self._network.apply_changes()
-
-        if is_on:
-            self._cell.start()
-
-    @property
-    def bandwidth(self):
-        """Get the channel bandwidth of the cell.
-
-        Return:
-            the number rb of the bandwidth.
-        """
-        return self._cell.get_bandwidth().num_rb
-
-    @property
-    def dl_channel(self):
-        """Gets the downlink channel of cell.
-
-        Return:
-            the downlink channel (earfcn) in int.
-        """
-        return int(self._cell.get_dl_earfcn())
-
-    @property
-    def dl_frequency(self):
-        """Get the downlink frequency of the cell."""
-        from mrtype.frequency import Frequency
-        return self._cell.get_dl_earfcn().to_freq().in_units(
-                Frequency.Units.GHz)
-
-    def _to_rb_bandwidth(self, bandwidth):
-        for idx in range(5):
-            if bandwidth < LTE_MHZ_UPPER_BOUND_TO_RB[idx][0]:
-                return LTE_MHZ_UPPER_BOUND_TO_RB[idx][1]
-        return 100
-
-    def set_bandwidth(self, bandwidth):
-        """Sets the channel bandwidth of the cell.
-
-        Args:
-            bandwidth: channel bandwidth of cell in MHz.
-        """
-        self._cell.set_bandwidth(self._to_rb_bandwidth(bandwidth))
-
-    def set_cell_frequency_band(self, tdd_cfg=None, ssf_cfg=None):
-        """Sets cell frequency band with tdd and ssf config.
-
-        Args:
-            tdd_cfg: the tdd subframe assignment config in number (from 0-6).
-            ssf_cfg: the special subframe pattern config in number (from 1-9).
-        """
-        from rs_mrt.testenvironment.signaling.sri.rat.lte import SpecialSubframePattern
-        from rs_mrt.testenvironment.signaling.sri.rat.lte import SubFrameAssignment
-        from rs_mrt.testenvironment.signaling.sri.rat.lte.config import CellFrequencyBand
-        from rs_mrt.testenvironment.signaling.sri.rat.lte.config import Tdd
-        tdd_subframe = None
-        ssf_pattern = None
-        if tdd_cfg:
-            tdd_subframe = SubFrameAssignment(tdd_cfg + 1)
-        if ssf_cfg:
-            ssf_pattern = SpecialSubframePattern(ssf_cfg)
-        tdd = Tdd(tdd_config=Tdd.TddConfigSignaling(
-                subframe_assignment=tdd_subframe,
-                special_subframe_pattern=ssf_pattern))
-        self._cell.stub.SetCellFrequencyBand(CellFrequencyBand(tdd=tdd))
-        self._network.apply_changes()
-
-    def set_cfi(self, cfi):
-        """Sets number of pdcch symbols (cfi).
-
-        Args:
-            cfi: the value of NumberOfPdcchSymbols
-        """
-        from rs_mrt.testenvironment.signaling.sri.rat.lte import NumberOfPdcchSymbols
-        from rs_mrt.testenvironment.signaling.sri.rat.lte.config import PdcchRegionReq
-
-        logger.info('The cfi enum to set is {}'.format(
-                NumberOfPdcchSymbols(cfi)))
-        req = PdcchRegionReq()
-        req.num_pdcch_symbols = NumberOfPdcchSymbols(cfi)
-        self._cell.stub.SetPdcchControlRegion(req)
-
-    def set_dci_format(self, dci_format):
-        """Selects the downlink control information (DCI) format.
-
-        Args:
-            dci_format: supported dci.
-        """
-        if not isinstance(dci_format, DciFormat):
-            raise CmxError('Wrong type for dci_format')
-        self._config_scheduler(dl_dci_format=dci_format.value)
-
-    def set_dl_channel(self, channel):
-        """Sets the downlink channel number of cell.
-
-        Args:
-            channel: downlink channel number of cell.
-        """
-        if self.dl_channel == channel:
-            logger.info('The dl_channel was at {}'.format(self.dl_channel))
-            return
-        self._cell.set_earfcn(channel)
-        logger.info('The dl_channel was set to {}'.format(self.dl_channel))
-
-    def set_dl_modulation_table(self, modulation):
-        """Sets down link modulation table.
-
-        Args:
-            modulation: modulation table setting (ModulationType).
-        """
-        if not isinstance(modulation, ModulationType):
-            raise CmxError('The modulation is not the type of Modulation')
-        self._config_scheduler(dl_mcs_table=modulation.value)
-
-    def set_mimo_mode(self, mimo):
-        """Sets mimo mode for Lte scenario.
-
-        Args:
-            mimo: the mimo mode.
-        """
-        if not isinstance(mimo, MimoModes):
-            raise CmxError("Wrong type of mimo mode")
-
-        is_on = self._cell.is_on()
-        if is_on:
-            self._cell.stop()
-        self._cell.set_num_crs_antenna_ports(mimo.value)
-        self._config_scheduler(dl_num_layers=MIMO_MAX_LAYER_MAPPING[mimo])
-        if is_on:
-            self._cell.start()
-
-    def set_scheduling_mode(
-        self, mcs_dl=None, mcs_ul=None, nrb_dl=None, nrb_ul=None):
-        """Sets scheduling mode.
-
-        Args:
-            scheduling: the new scheduling mode.
-            mcs_dl: Downlink MCS.
-            mcs_ul: Uplink MCS.
-            nrb_dl: Number of RBs for downlink.
-            nrb_ul: Number of RBs for uplink.
-        """
-        self._config_scheduler(dl_mcs=mcs_dl, ul_mcs=mcs_ul, dl_rb_alloc=nrb_dl,
-                ul_rb_alloc=nrb_ul)
-
-    def set_ssf_config(self, ssf_config):
-        """Sets ssf subframe assignment with tdd_config.
-
-        Args:
-            ssf_config: the special subframe pattern config (from 1-9).
-        """
-        self.set_cell_frequency_band(ssf_cfg=ssf_config)
-
-    def set_tdd_config(self, tdd_config):
-        """Sets tdd subframe assignment with tdd_config.
-
-        Args:
-            tdd_config: the subframe assignemnt config (from 0-6).
-        """
-        self.set_cell_frequency_band(tdd_cfg=tdd_config)
-
-    def set_transmission_mode(self, transmission_mode):
-        """Sets transmission mode with schedular.
-
-        Args:
-            transmission_mode: the download link transmission mode.
-        """
-        if not isinstance(transmission_mode, TransmissionModes):
-            raise CmxError('Wrong type of the trasmission mode')
-        self._config_scheduler(dl_tm=transmission_mode)
-
-    def set_ul_channel(self, channel):
-        """Sets the up link channel number of cell.
-
-        Args:
-            channel: up link channel number of cell.
-        """
-        if self.ul_channel == channel:
-            logger.info('The ul_channel is at {}'.format(self.ul_channel))
-            return
-        self._cell.set_earfcn(channel)
-        logger.info('The dl_channel was set to {}'.format(self.ul_channel))
-
-    @property
-    def ul_channel(self):
-        """Gets the uplink channel of cell.
-
-        Return:
-            the uplink channel (earfcn) in int
-        """
-        return int(self._cell.get_ul_earfcn())
-
-    @property
-    def ul_frequency(self):
-        """Get the uplink frequency of the cell.
-
-        Return:
-            The uplink frequency in GHz.
-        """
-        from mrtype.frequency import Frequency
-        return self._cell.get_ul_earfcn().to_freq().in_units(
-                Frequency.Units.GHz)
-
-    def set_ul_modulation_table(self, modulation):
-        """Sets up link modulation table.
-
-        Args:
-            modulation: modulation table setting (ModulationType).
-        """
-        if not isinstance(modulation, ModulationType):
-            raise CmxError('The modulation is not the type of Modulation')
-        if modulation == ModulationType.Q16:
-            self._cell.stub.SetPuschCommonConfig(False)
-        else:
-            self._cell.stub.SetPuschCommonConfig(True)
-
-
-class NrBaseStation(BaseStation):
-    """ NR base station."""
-
-    def __init__(self, cmx, cell):
-        """Init method to setup variables for the NR base station.
-
-        Args:
-            cmx: Controller (Cmx500) object.
-            cell: The cell for the NR base station.
-        """
-        from xlapi.nr_cell import NrCell
-        if not isinstance(cell, NrCell):
-            raise CmxError('the cell is not a NR cell, NR base station  fails'
-                           ' to creat.')
-
-        super().__init__(cmx, cell)
-
-    def _config_scheduler(self, dl_mcs=None, dl_mcs_table=None,
-                          dl_rb_alloc=None, dl_mimo_mode=None,
-                          ul_mcs=None, ul_mcs_table=None, ul_rb_alloc=None,
-                          ul_mimo_mode=None):
-
-        from rs_mrt.testenvironment.signaling.sri.rat.nr import McsTable
-
-        log_list = []
-        if dl_mcs:
-            log_list.append('dl_mcs: {}'.format(dl_mcs))
-        if ul_mcs:
-            log_list.append('ul_mcs: {}'.format(ul_mcs))
-
-        # If rb alloc is not a tuple, add 0 as start RBs for XLAPI NR scheduler
-        if dl_rb_alloc:
-            if not isinstance(dl_rb_alloc, tuple):
-                dl_rb_alloc = (0, dl_rb_alloc)
-            log_list.append('dl_rb_alloc: {}'.format(dl_rb_alloc))
-        if ul_rb_alloc:
-            if not isinstance(ul_rb_alloc, tuple):
-                ul_rb_alloc = (0, ul_rb_alloc)
-            log_list.append('ul_rb_alloc: {}'.format(ul_rb_alloc))
-        if dl_mcs_table:
-            dl_mcs_table = McsTable(dl_mcs_table)
-            log_list.append('dl_mcs_table: {}'.format(dl_mcs_table))
-        if ul_mcs_table:
-            ul_mcs_table = McsTable(ul_mcs_table)
-            log_list.append('ul_mcs_table: {}'.format(ul_mcs_table))
-        if dl_mimo_mode:
-            log_list.append('dl_mimo_mode: {}'.format(dl_mimo_mode))
-        if ul_mimo_mode:
-            log_list.append('ul_mimo_mode: {}'.format(ul_mimo_mode))
-
-        is_on = self._cell.is_on()
-        if is_on:
-            self._cell.stop()
-            time.sleep(1)
-        scheduler = self._cmx.dut.get_scheduler(self._cell)
-        logger.info('configure scheduler for {}'.format(','.join(log_list)))
-
-        scheduler.configure_ue_scheduler(
-                dl_mcs=dl_mcs, dl_mcs_table=dl_mcs_table,
-                dl_rb_alloc=dl_rb_alloc, dl_mimo_mode=dl_mimo_mode,
-                ul_mcs=ul_mcs, ul_mcs_table=ul_mcs_table,
-                ul_rb_alloc=ul_rb_alloc, ul_mimo_mode=ul_mimo_mode)
-        logger.info('Configure scheduler succeeds')
-        self._network.apply_changes()
-
-        if is_on:
-            self._cell.start()
-
-    def attach_as_secondary_cell(self, endc_timer=DEFAULT_ENDC_TIMER):
-        """Enable endc mode for NR cell.
-
-        Args:
-            endc_timer: timeout for endc state
-        """
-        logger.info('enable endc mode for nsa dual connection')
-        self._cmx.dut.signaling.nsa_dual_connect(self._cell)
-        time_count = 0
-        while time_count < endc_timer:
-            if str(self._cmx.dut.state.radio_connectivity) == \
-                    'RadioConnectivityMode.EPS_LTE_NR':
-                logger.info('enter endc mode')
-                return
-            time.sleep(1)
-            time_count += 1
-            if time_count % 30 == 0:
-                logger.info('did not reach endc at {} s'.format(time_count))
-        raise CmxError('Cannot reach endc after {} s'.format(endc_timer))
-
-    @property
-    def dl_channel(self):
-        """Gets the downlink channel of cell.
-
-        Return:
-            the downlink channel (nr_arfcn) in int.
-        """
-        return int(self._cell.get_dl_ref_a())
-
-    def _bandwidth_to_carrier_bandwidth(self, bandwidth):
-        """Converts bandwidth in MHz to CarrierBandwidth.
-            CarrierBandwidth Enum in XLAPI:
-                MHZ_5 = 0
-                MHZ_10 = 1
-                MHZ_15 = 2
-                MHZ_20 = 3
-                MHZ_25 = 4
-                MHZ_30 = 5
-                MHZ_40 = 6
-                MHZ_50 = 7
-                MHZ_60 = 8
-                MHZ_70 = 9
-                MHZ_80 = 10
-                MHZ_90 = 11
-                MHZ_100 = 12
-                MHZ_200 = 13
-                MHZ_400 = 14
-        Args:
-            bandwidth: channel bandwidth in MHz.
-
-        Return:
-            the corresponding NR Carrier Bandwidth.
-        """
-        from mrtype.nr.frequency import CarrierBandwidth
-        if bandwidth > 100:
-            return CarrierBandwidth(12 + bandwidth // 200)
-        elif bandwidth > 30:
-            return CarrierBandwidth(2 + bandwidth // 10)
-        else:
-            return CarrierBandwidth(bandwidth // 5 - 1)
-
-    def set_bandwidth(self, bandwidth, scs=None):
-        """Sets the channel bandwidth of the cell.
-
-        Args:
-            bandwidth: channel bandwidth of cell.
-            scs: subcarrier spacing (SCS) of resource grid 0
-        """
-        if not scs:
-            scs = self._cell.get_scs()
-        self._cell.set_carrier_bandwidth_and_scs(
-                self._bandwidth_to_carrier_bandwidth(bandwidth), scs)
-        logger.info('The bandwidth in MHz is {}. After setting, the value is {}'
-                    .format(bandwidth, str(self._cell.get_carrier_bandwidth())))
-
-    def set_dl_channel(self, channel):
-        """Sets the downlink channel number of cell.
-
-        Args:
-            channel: downlink channel number of cell.
-        """
-        from mrtype.nr.frequency import NrArfcn
-        if self.dl_channel == channel:
-            logger.info('The dl_channel was at {}'.format(self.dl_channel))
-            return
-        self._cell.set_dl_ref_a_offset(self.band, NrArfcn(channel))
-        logger.info('The dl_channel was set to {}'.format(self.dl_channel))
-
-    def set_dl_modulation_table(self, modulation):
-        """Sets down link modulation table.
-
-        Args:
-            modulation: modulation table setting (ModulationType).
-        """
-        if not isinstance(modulation, ModulationType):
-            raise CmxError('The modulation is not the type of Modulation')
-        self._config_scheduler(dl_mcs_table=modulation.value)
-
-    def set_mimo_mode(self, mimo):
-        """Sets mimo mode for NR nsa scenario.
-
-        Args:
-            mimo: the mimo mode.
-        """
-        from rs_mrt.testenvironment.signaling.sri.rat.nr import DownlinkMimoMode
-        if not isinstance(mimo, MimoModes):
-            raise CmxError("Wrong type of mimo mode")
-
-        is_on = self._cell.is_on()
-        if is_on:
-            self._cell.stop()
-        self._config_scheduler(dl_mimo_mode=DownlinkMimoMode.Enum(mimo.value))
-        if is_on:
-            self._cell.start()
-
-    def set_scheduling_mode(
-        self, mcs_dl=None, mcs_ul=None, nrb_dl=None, nrb_ul=None):
-        """Sets scheduling mode.
-
-        Args:
-            mcs_dl: Downlink MCS.
-            mcs_ul: Uplink MCS.
-            nrb_dl: Number of RBs for downlink.
-            nrb_ul: Number of RBs for uplink.
-        """
-        self._config_scheduler(dl_mcs=mcs_dl, ul_mcs=mcs_ul, dl_rb_alloc=nrb_dl,
-                ul_rb_alloc=nrb_ul)
-
-    def set_ssf_config(self, ssf_config):
-        """Sets ssf subframe assignment with tdd_config.
-
-        Args:
-            ssf_config: the special subframe pattern config (from 1-9).
-        """
-        raise CmxError('the set ssf config for nr did not implemente yet')
-
-    def set_tdd_config(self, tdd_config):
-        """Sets tdd subframe assignment with tdd_config.
-
-        Args:
-            tdd_config: the subframe assignemnt config (from 0-6).
-        """
-        raise CmxError('the set tdd config for nr did not implemente yet')
-
-    def set_transmission_mode(self, transmission_mode):
-        """Sets transmission mode with schedular.
-
-        Args:
-            transmission_mode: the download link transmission mode.
-        """
-        logger.info('The set transmission mode for nr is set by mimo mode')
-
-    def set_ul_modulation_table(self, modulation):
-        """Sets down link modulation table.
-
-        Args:
-            modulation: modulation table setting (ModulationType).
-        """
-        if not isinstance(modulation, ModulationType):
-            raise CmxError('The modulation is not the type of Modulation')
-        self._config_scheduler(ul_mcs_table=modulation.value)
-
-
-class CmxError(Exception):
-    """Class to raise exceptions related to cmx."""
diff --git a/src/antlion/controllers/rohdeschwarz_lib/cmx500_cellular_simulator.py b/src/antlion/controllers/rohdeschwarz_lib/cmx500_cellular_simulator.py
deleted file mode 100644
index e8a7871..0000000
--- a/src/antlion/controllers/rohdeschwarz_lib/cmx500_cellular_simulator.py
+++ /dev/null
@@ -1,389 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.controllers.rohdeschwarz_lib import cmx500
-from antlion.controllers.rohdeschwarz_lib.cmx500 import LteBandwidth
-from antlion.controllers.rohdeschwarz_lib.cmx500 import LteState
-from antlion.controllers import cellular_simulator as cc
-from antlion.controllers.cellular_lib import LteSimulation
-
-CMX_TM_MAPPING = {
-    LteSimulation.TransmissionMode.TM1: cmx500.TransmissionModes.TM1,
-    LteSimulation.TransmissionMode.TM2: cmx500.TransmissionModes.TM2,
-    LteSimulation.TransmissionMode.TM3: cmx500.TransmissionModes.TM3,
-    LteSimulation.TransmissionMode.TM4: cmx500.TransmissionModes.TM4,
-    LteSimulation.TransmissionMode.TM7: cmx500.TransmissionModes.TM7,
-    LteSimulation.TransmissionMode.TM8: cmx500.TransmissionModes.TM8,
-    LteSimulation.TransmissionMode.TM9: cmx500.TransmissionModes.TM9,
-}
-
-CMX_SCH_MAPPING = {
-    LteSimulation.SchedulingMode.STATIC: cmx500.SchedulingMode.USERDEFINEDCH
-}
-
-CMX_MIMO_MAPPING = {
-    LteSimulation.MimoMode.MIMO_1x1: cmx500.MimoModes.MIMO1x1,
-    LteSimulation.MimoMode.MIMO_2x2: cmx500.MimoModes.MIMO2x2,
-    LteSimulation.MimoMode.MIMO_4x4: cmx500.MimoModes.MIMO4x4,
-}
-
-
-class CMX500CellularSimulator(cc.AbstractCellularSimulator):
-    """ A cellular simulator for telephony simulations based on the CMX 500
-    controller. """
-
-    def __init__(self, ip_address, port='5025'):
-        """ Initializes the cellular simulator.
-
-        Args:
-            ip_address: the ip address of the CMX500
-            port: the port number for the CMX500 controller
-        """
-        super().__init__()
-        try:
-            self.cmx = cmx500.Cmx500(ip_address, port)
-        except:
-            raise cc.CellularSimulatorError('Error when Initializes CMX500.')
-
-        self.bts = self.cmx.bts
-
-    def destroy(self):
-        """ Sends finalization commands to the cellular equipment and closes
-        the connection. """
-        self.log.info('destroy the cmx500 simulator')
-        self.cmx.disconnect()
-
-    def setup_lte_scenario(self):
-        """ Configures the equipment for an LTE simulation. """
-        self.log.info('setup lte scenario')
-        self.cmx.switch_lte_signalling(cmx500.LteState.LTE_ON)
-
-    def setup_nr_sa_scenario(self):
-        """ Configures the equipment for an NR stand alone simulation. """
-        raise NotImplementedError()
-
-    def setup_nr_nsa_scenario(self):
-        """ Configures the equipment for an NR non stand alone simulation. """
-        self.log.info('setup nsa scenario (start lte cell and nr cell')
-        self.cmx.switch_on_nsa_signalling()
-
-    def set_band_combination(self, bands):
-        """ Prepares the test equipment for the indicated band combination.
-
-        Args:
-            bands: a list of bands represented as ints or strings
-        """
-        self.num_carriers = len(bands)
-
-    def set_lte_rrc_state_change_timer(self, enabled, time=10):
-        """ Configures the LTE RRC state change timer.
-
-        Args:
-            enabled: a boolean indicating if the timer should be on or off.
-            time: time in seconds for the timer to expire
-        """
-        self.log.info('set timer enabled to {} and the time to {}'.format(
-            enabled, time))
-        self.cmx.rrc_state_change_time_enable = enabled
-        self.cmx.lte_rrc_state_change_timer = time
-
-    def set_band(self, bts_index, band):
-        """ Sets the band for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            band: the new band
-        """
-        self.log.info('set band to {}'.format(band))
-        self.bts[bts_index].set_band(int(band))
-
-    def get_duplex_mode(self, band):
-        """ Determines if the band uses FDD or TDD duplex mode
-
-        Args:
-            band: a band number
-
-        Returns:
-            an variable of class DuplexMode indicating if band is FDD or TDD
-        """
-        if 33 <= int(band) <= 46:
-            return cmx500.DuplexMode.TDD
-        else:
-            return cmx500.DuplexMode.FDD
-
-    def set_input_power(self, bts_index, input_power):
-        """ Sets the input power for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            input_power: the new input power
-        """
-        if input_power > 23:
-            self.log.warning('Open loop supports -50dBm to 23 dBm. '
-                             'Setting it to max power 23 dBm')
-            input_power = 23
-        self.log.info('set input power to {}'.format(input_power))
-        self.bts[bts_index].set_ul_power(input_power)
-
-    def set_output_power(self, bts_index, output_power):
-        """ Sets the output power for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            output_power: the new output power
-        """
-        self.log.info('set output power to {}'.format(output_power))
-        self.bts[bts_index].set_dl_power(output_power)
-
-    def set_tdd_config(self, bts_index, tdd_config):
-        """ Sets the tdd configuration number for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            tdd_config: the new tdd configuration number (from 0 to 6)
-        """
-        self.log.info('set tdd config to {}'.format(tdd_config))
-        self.bts[bts_index].set_tdd_config(tdd_config)
-
-    def set_ssf_config(self, bts_index, ssf_config):
-        """ Sets the Special Sub-Frame config number for the indicated
-        base station.
-
-        Args:
-            bts_index: the base station number
-            ssf_config: the new ssf config number (from 0 to 9)
-        """
-        self.log.info('set ssf config to {}'.format(ssf_config))
-        self.bts[bts_index].set_ssf_config(ssf_config)
-
-    def set_bandwidth(self, bts_index, bandwidth):
-        """ Sets the bandwidth for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            bandwidth: the new bandwidth in MHz
-        """
-        self.log.info('set bandwidth of bts {} to {}'.format(
-            bts_index, bandwidth))
-        self.bts[bts_index].set_bandwidth(int(bandwidth))
-
-    def set_downlink_channel_number(self, bts_index, channel_number):
-        """ Sets the downlink channel number for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            channel_number: the new channel number (earfcn)
-        """
-        self.log.info(
-            'Sets the downlink channel number to {}'.format(channel_number))
-        self.bts[bts_index].set_dl_channel(channel_number)
-
-    def set_mimo_mode(self, bts_index, mimo_mode):
-        """ Sets the mimo mode for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            mimo_mode: the new mimo mode
-        """
-        self.log.info('set mimo mode to {}'.format(mimo_mode))
-        mimo_mode = CMX_MIMO_MAPPING[mimo_mode]
-        self.bts[bts_index].set_mimo_mode(mimo_mode)
-
-    def set_transmission_mode(self, bts_index, tmode):
-        """ Sets the transmission mode for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            tmode: the new transmission mode
-        """
-        self.log.info('set TransmissionMode to {}'.format(tmode))
-        tmode = CMX_TM_MAPPING[tmode]
-        self.bts[bts_index].set_transmission_mode(tmode)
-
-    def set_scheduling_mode(self,
-                            bts_index,
-                            scheduling,
-                            mcs_dl=None,
-                            mcs_ul=None,
-                            nrb_dl=None,
-                            nrb_ul=None):
-        """ Sets the scheduling mode for the indicated base station.
-
-        Args:
-            bts_index: the base station number.
-            scheduling: the new scheduling mode.
-            mcs_dl: Downlink MCS.
-            mcs_ul: Uplink MCS.
-            nrb_dl: Number of RBs for downlink.
-            nrb_ul: Number of RBs for uplink.
-        """
-        if scheduling not in CMX_SCH_MAPPING:
-            raise cc.CellularSimulatorError(
-                "This scheduling mode is not supported")
-        log_list = []
-        if mcs_dl:
-            log_list.append('mcs_dl: {}'.format(mcs_dl))
-        if mcs_ul:
-            log_list.append('mcs_ul: {}'.format(mcs_ul))
-        if nrb_dl:
-            log_list.append('nrb_dl: {}'.format(nrb_dl))
-        if nrb_ul:
-            log_list.append('nrb_ul: {}'.format(nrb_ul))
-
-        self.log.info('set scheduling mode to {}'.format(','.join(log_list)))
-        self.bts[bts_index].set_scheduling_mode(mcs_dl=mcs_dl,
-                                                mcs_ul=mcs_ul,
-                                                nrb_dl=nrb_dl,
-                                                nrb_ul=nrb_ul)
-
-    def set_dl_256_qam_enabled(self, bts_index, enabled):
-        """ Determines what MCS table should be used for the downlink.
-
-        Args:
-            bts_index: the base station number
-            enabled: whether 256 QAM should be used
-        """
-        self.log.info('Set 256 QAM DL MCS enabled: ' + str(enabled))
-        self.bts[bts_index].set_dl_modulation_table(
-            cmx500.ModulationType.Q256 if enabled else cmx500.ModulationType.
-            Q64)
-
-    def set_ul_64_qam_enabled(self, bts_index, enabled):
-        """ Determines what MCS table should be used for the uplink.
-
-        Args:
-            bts_index: the base station number
-            enabled: whether 64 QAM should be used
-        """
-        self.log.info('Set 64 QAM UL MCS enabled: ' + str(enabled))
-        self.bts[bts_index].set_ul_modulation_table(
-            cmx500.ModulationType.Q64 if enabled else cmx500.ModulationType.Q16
-        )
-
-    def set_mac_padding(self, bts_index, mac_padding):
-        """ Enables or disables MAC padding in the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            mac_padding: the new MAC padding setting
-        """
-        self.log.info('set mac pad on {}'.format(mac_padding))
-        self.bts[bts_index].set_dl_mac_padding(mac_padding)
-
-    def set_cfi(self, bts_index, cfi):
-        """ Sets the Channel Format Indicator for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            cfi: the new CFI setting
-        """
-        if cfi == 'BESTEFFORT':
-            self.log.info('The cfi is BESTEFFORT, use default value')
-            return
-        try:
-            index = int(cfi) + 1
-        except Exception as e:
-            index = 1
-        finally:
-            self.log.info('set the cfi and the cfi index is {}'.format(index))
-            self.bts[bts_index].set_cfi(index)
-
-    def set_paging_cycle(self, bts_index, cycle_duration):
-        """ Sets the paging cycle duration for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            cycle_duration: the new paging cycle duration in milliseconds
-        """
-        self.log.warning('The set_paging_cycle method is not implememted, '
-                         'use default value')
-
-    def set_phich_resource(self, bts_index, phich):
-        """ Sets the PHICH Resource setting for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            phich: the new PHICH resource setting
-        """
-        self.log.warning('The set_phich_resource method is not implememted, '
-                         'use default value')
-
-    def lte_attach_secondary_carriers(self, ue_capability_enquiry):
-        """ Activates the secondary carriers for CA. Requires the DUT to be
-        attached to the primary carrier first.
-
-        Args:
-            ue_capability_enquiry: UE capability enquiry message to be sent to
-        the UE before starting carrier aggregation.
-        """
-        self.wait_until_communication_state()
-        self.bts[1].attach_as_secondary_cell()
-
-    def wait_until_attached(self, timeout=120):
-        """ Waits until the DUT is attached to the primary carrier.
-
-        Args:
-            timeout: after this amount of time the method will raise a
-                CellularSimulatorError exception. Default is 120 seconds.
-        """
-        self.log.info('wait until attached')
-        self.cmx.wait_until_attached(timeout)
-
-    def wait_until_communication_state(self, timeout=120):
-        """ Waits until the DUT is in Communication state.
-
-        Args:
-            timeout: after this amount of time the method will raise a
-                CellularSimulatorError exception. Default is 120 seconds.
-        Return:
-            True if cmx reach rrc state within timeout
-        Raise:
-            CmxError if tiemout
-        """
-        self.log.info('wait for rrc on state')
-        return self.cmx.wait_for_rrc_state(cmx500.RrcState.RRC_ON, timeout)
-
-    def wait_until_idle_state(self, timeout=120):
-        """ Waits until the DUT is in Idle state.
-
-        Args:
-            timeout: after this amount of time the method will raise a
-                CellularSimulatorError exception. Default is 120 seconds.
-        Return:
-            True if cmx reach rrc state within timeout
-        Raise:
-            CmxError if tiemout
-        """
-        self.log.info('wait for rrc off state')
-        return self.cmx.wait_for_rrc_state(cmx500.RrcState.RRC_OFF, timeout)
-
-    def detach(self):
-        """ Turns off all the base stations so the DUT loose connection."""
-        self.log.info('Bypass simulator detach step for now')
-
-    def stop(self):
-        """ Stops current simulation. After calling this method, the simulator
-        will need to be set up again. """
-        self.log.info('Stops current simulation and disconnect cmx500')
-        self.cmx.disconnect()
-
-    def start_data_traffic(self):
-        """ Starts transmitting data from the instrument to the DUT. """
-        self.log.warning('The start_data_traffic is not implemented yet')
-
-    def stop_data_traffic(self):
-        """ Stops transmitting data from the instrument to the DUT. """
-        self.log.warning('The stop_data_traffic is not implemented yet')
diff --git a/src/antlion/controllers/rohdeschwarz_lib/contest.py b/src/antlion/controllers/rohdeschwarz_lib/contest.py
deleted file mode 100644
index 39308e9..0000000
--- a/src/antlion/controllers/rohdeschwarz_lib/contest.py
+++ /dev/null
@@ -1,422 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from zeep import client
-from antlion.libs.proc import job
-from xml.etree import ElementTree
-import requests
-import asyncio
-import time
-import threading
-import re
-import os
-import logging
-
-
-class Contest(object):
-    """ Controller interface for Rohde Schwarz CONTEST sequencer software. """
-
-    # Remote Server parameter / operation names
-    TESTPLAN_PARAM = 'Testplan'
-    TESTPLAN_VERSION_PARAM = 'TestplanVersion'
-    KEEP_ALIVE_PARAM = 'KeepContestAlive'
-    START_TESTPLAN_OPERATION = 'StartTestplan'
-
-    # Results dictionary keys
-    POS_ERROR_KEY = 'pos_error'
-    TTFF_KEY = 'ttff'
-    SENSITIVITY_KEY = 'sensitivity'
-
-    # Waiting times
-    OUTPUT_WAITING_INTERVAL = 5
-
-    # Maximum number of times to retry if the Contest system is not responding
-    MAXIMUM_OUTPUT_READ_RETRIES = 25
-
-    # Root directory for the FTP server in the remote computer
-    FTP_ROOT = 'D:\\Logs\\'
-
-    def __init__(self, logger, remote_ip, remote_port, automation_listen_ip,
-                 automation_port, dut_on_func, dut_off_func, ftp_usr, ftp_pwd):
-        """
-        Initializes the Contest software controller.
-
-        Args:
-            logger: a logger handle.
-            remote_ip: the Remote Server's IP address.
-            remote_port: port number used by the Remote Server.
-            automation_listen_ip: local IP address in which to listen for
-                Automation Server connections.
-            automation_port: port used for Contest's DUT automation requests.
-            dut_on_func: function to turn the DUT on.
-            dut_off_func: function to turn the DUT off.
-            ftp_usr: username to login to the FTP server on the remote host
-            ftp_pwd: password to authenticate ftp_user in the ftp server
-        """
-        self.log = logger
-        self.ftp_user = ftp_usr
-        self.ftp_pass = ftp_pwd
-
-        self.remote_server_ip = remote_ip
-
-        server_url = 'http://{}:{}/RemoteServer'.format(remote_ip, remote_port)
-
-        # Initialize the SOAP client to interact with Contest's Remote Server
-        try:
-            self.soap_client = client.Client(server_url + '/RemoteServer?wsdl')
-        except requests.exceptions.ConnectionError:
-            self.log.error('Could not connect to the remote endpoint. Is '
-                           'Remote Server running on the Windows computer?')
-            raise
-
-        # Assign a value to asyncio_loop in case the automation server is not
-        # started
-        self.asyncio_loop = None
-
-        # Start the automation server if an IP and port number were passed
-        if automation_listen_ip and automation_port:
-            self.start_automation_server(automation_port, automation_listen_ip,
-                                         dut_on_func, dut_off_func)
-
-    def start_automation_server(self, automation_port, automation_listen_ip,
-                                dut_on_func, dut_off_func):
-        """ Starts the Automation server in a separate process.
-
-        Args:
-            automation_listen_ip: local IP address in which to listen for
-                Automation Server connections.
-            automation_port: port used for Contest's DUT automation requests.
-            dut_on_func: function to turn the DUT on.
-            dut_off_func: function to turn the DUT off.
-        """
-
-        # Start an asyncio event loop to run the automation server
-        self.asyncio_loop = asyncio.new_event_loop()
-
-        # Start listening for automation requests on a separate thread. This
-        # will start a new thread in which a socket will listen for incoming
-        # connections and react to Contest's automation commands
-
-        def start_automation_server(asyncio_loop):
-            AutomationServer(self.log, automation_port, automation_listen_ip,
-                             dut_on_func, dut_off_func, asyncio_loop)
-
-        automation_daemon = threading.Thread(
-            target=start_automation_server, args=[self.asyncio_loop])
-        automation_daemon.start()
-
-    def execute_testplan(self, testplan):
-        """ Executes a test plan with Contest's Remote Server sequencer.
-
-        Waits until and exit code is provided in the output. Logs the output with
-        the class logger and pulls the json report from the server if the test
-        succeeds.
-
-        Arg:
-            testplan: the test plan's name in the Contest system
-
-        Returns:
-            a dictionary with test results if the test finished successfully,
-            and None if it finished with an error exit code.
-        """
-
-        self.soap_client.service.DoSetParameterValue(self.TESTPLAN_PARAM,
-                                                     testplan)
-        self.soap_client.service.DoSetParameterValue(
-            self.TESTPLAN_VERSION_PARAM, 16)
-        self.soap_client.service.DoSetParameterValue(self.KEEP_ALIVE_PARAM,
-                                                     'true')
-
-        # Remote Server sometimes doesn't respond to the request immediately and
-        # frequently times out producing an exception. A shorter timeout will
-        # throw the exception earlier and allow the script to continue.
-        with self.soap_client.options(timeout=5):
-            try:
-                self.soap_client.service.DoStartOperation(
-                    self.START_TESTPLAN_OPERATION)
-            except requests.exceptions.ReadTimeout:
-                pass
-
-        self.log.info('Started testplan {} in Remote Server.'.format(testplan))
-
-        testplan_directory = None
-        read_retries = 0
-
-        while True:
-
-            time.sleep(self.OUTPUT_WAITING_INTERVAL)
-            output = self.soap_client.service.DoGetOutput()
-
-            # Output might be None while the instrument is busy.
-            if output:
-                self.log.debug(output)
-
-                # Obtain the path to the folder where reports generated by the
-                # test equipment will be stored in the remote computer
-                if not testplan_directory:
-                    prefix = re.escape('Testplan Directory: ' + self.FTP_ROOT)
-                    match = re.search('(?<={}).+(?=\\\\)'.format(prefix),
-                                      output)
-                    if match:
-                        testplan_directory = match.group(0)
-
-                # An exit code in the output indicates that the measurement is
-                # completed.
-                match = re.search('(?<=Exit code: )-?\d+', output)
-                if match:
-                    exit_code = int(match.group(0))
-                    break
-
-                # Reset the not-responding counter
-                read_retries = 0
-
-            else:
-                # If the output has been None for too many retries in a row,
-                # the testing instrument is assumed to be unresponsive.
-                read_retries += 1
-                if read_retries == self.MAXIMUM_OUTPUT_READ_RETRIES:
-                    raise RuntimeError('The Contest test sequencer is not '
-                                       'responding.')
-
-        self.log.info(
-            'Contest testplan finished with exit code {}.'.format(exit_code))
-
-        if exit_code in [0, 1]:
-            self.log.info('Testplan reports are stored in {}.'.format(
-                testplan_directory))
-
-            return self.pull_test_results(testplan_directory)
-
-    def pull_test_results(self, testplan_directory):
-        """ Downloads the test reports from the remote host and parses the test
-        summary to obtain the results.
-
-        Args:
-            testplan_directory: directory where to look for reports generated
-                by the test equipment in the remote computer
-
-        Returns:
-             a JSON object containing the test results
-        """
-
-        if not testplan_directory:
-            raise ValueError('Invalid testplan directory.')
-
-        # Download test reports from the remote host
-        job.run('wget -r --user={} --password={} -P {} ftp://{}/{}'.format(
-            self.ftp_user, self.ftp_pass, logging.log_path,
-            self.remote_server_ip, testplan_directory))
-
-        # Open the testplan directory
-        testplan_path = os.path.join(logging.log_path, self.remote_server_ip,
-                                     testplan_directory)
-
-        # Find the report.json file in the testcase folder
-        dir_list = os.listdir(testplan_path)
-        xml_path = None
-
-        for dir in dir_list:
-            if 'TestCaseName' in dir:
-                xml_path = os.path.join(testplan_path, dir,
-                                        'SummaryReport.xml')
-                break
-
-        if not xml_path:
-            raise RuntimeError('Could not find testcase directory.')
-
-        # Return the obtained report as a dictionary
-        xml_tree = ElementTree.ElementTree()
-        xml_tree.parse(source=xml_path)
-
-        results_dictionary = {}
-
-        col_iterator = xml_tree.iter('column')
-        for col in col_iterator:
-            # Look in the text of the first child for the required metrics
-            if col.text == '2D position error [m]':
-                results_dictionary[self.POS_ERROR_KEY] = {
-                    'min': float(next(col_iterator).text),
-                    'med': float(next(col_iterator).text),
-                    'avg': float(next(col_iterator).text),
-                    'max': float(next(col_iterator).text)
-                }
-            elif col.text == 'Time to first fix [s]':
-                results_dictionary[self.TTFF_KEY] = {
-                    'min': float(next(col_iterator).text),
-                    'med': float(next(col_iterator).text),
-                    'avg': float(next(col_iterator).text),
-                    'max': float(next(col_iterator).text)
-                }
-
-        message_iterator = xml_tree.iter('message')
-        for message in message_iterator:
-            # Look for the line showing sensitivity
-            if message.text:
-                # The typo in 'successfull' is intended as it is present in the
-                # test logs generated by the Contest system.
-                match = re.search('(?<=Margin search completed, the lowest '
-                                  'successfull output power is )-?\d+.?\d+'
-                                  '(?= dBm)', message.text)
-                if match:
-                    results_dictionary[self.SENSITIVITY_KEY] = float(
-                        match.group(0))
-                    break
-
-        return results_dictionary
-
-    def destroy(self):
-        """ Closes all open connections and kills running threads. """
-        if self.asyncio_loop:
-            # Stopping the asyncio loop will let the Automation Server exit
-            self.asyncio_loop.call_soon_threadsafe(self.asyncio_loop.stop)
-
-
-class AutomationServer:
-    """ Server object that handles DUT automation requests from Contest's Remote
-    Server.
-    """
-
-    def __init__(self, logger, port, listen_ip, dut_on_func, dut_off_func,
-                 asyncio_loop):
-        """ Initializes the Automation Server.
-
-        Opens a listening socket using a asyncio and waits for incoming
-        connections.
-
-        Args:
-            logger: a logger handle
-            port: port used for Contest's DUT automation requests
-            listen_ip: local IP in which to listen for connections
-            dut_on_func: function to turn the DUT on
-            dut_off_func: function to turn the DUT off
-            asyncio_loop: asyncio event loop to listen and process incoming
-                data asynchronously
-        """
-
-        self.log = logger
-
-        # Define a protocol factory that will provide new Protocol
-        # objects to the server created by asyncio. This Protocol
-        # objects will handle incoming commands
-        def aut_protocol_factory():
-            return self.AutomationProtocol(logger, dut_on_func, dut_off_func)
-
-        # Each client connection will create a new protocol instance
-        coro = asyncio_loop.create_server(aut_protocol_factory, listen_ip,
-                                          port)
-
-        self.server = asyncio_loop.run_until_complete(coro)
-
-        # Serve requests until Ctrl+C is pressed
-        self.log.info('Automation Server listening on {}'.format(
-            self.server.sockets[0].getsockname()))
-        asyncio_loop.run_forever()
-
-    class AutomationProtocol(asyncio.Protocol):
-        """ Defines the protocol for communication with Contest's Automation
-        client. """
-
-        AUTOMATION_DUT_ON = 'DUT_SWITCH_ON'
-        AUTOMATION_DUT_OFF = 'DUT_SWITCH_OFF'
-        AUTOMATION_OK = 'OK'
-
-        NOTIFICATION_TESTPLAN_START = 'AtTestplanStart'
-        NOTIFICATION_TESTCASE_START = 'AtTestcaseStart'
-        NOTIFICATION_TESCASE_END = 'AfterTestcase'
-        NOTIFICATION_TESTPLAN_END = 'AfterTestplan'
-
-        def __init__(self, logger, dut_on_func, dut_off_func):
-            """ Keeps the function handles to be used upon incoming requests.
-
-            Args:
-                logger: a logger handle
-                dut_on_func: function to turn the DUT on
-                dut_off_func: function to turn the DUT off
-            """
-
-            self.log = logger
-            self.dut_on_func = dut_on_func
-            self.dut_off_func = dut_off_func
-
-        def connection_made(self, transport):
-            """ Called when a connection has been established.
-
-            Args:
-                transport: represents the socket connection.
-            """
-
-            # Keep a reference to the transport as it will allow to write
-            # data to the socket later.
-            self.transport = transport
-
-            peername = transport.get_extra_info('peername')
-            self.log.info('Connection from {}'.format(peername))
-
-        def data_received(self, data):
-            """ Called when some data is received.
-
-            Args:
-                 data: non-empty bytes object containing the incoming data
-             """
-            command = data.decode()
-
-            # Remove the line break and newline characters at the end
-            command = re.sub('\r?\n$', '', command)
-
-            self.log.info("Command received from Contest's Automation "
-                          "client: {}".format(command))
-
-            if command == self.AUTOMATION_DUT_ON:
-                self.log.info("Contest's Automation client requested to set "
-                              "DUT to on state.")
-                self.send_ok()
-                self.dut_on_func()
-                return
-            elif command == self.AUTOMATION_DUT_OFF:
-                self.log.info("Contest's Automation client requested to set "
-                              "DUT to off state.")
-                self.dut_off_func()
-                self.send_ok()
-            elif command.startswith(self.NOTIFICATION_TESTPLAN_START):
-                self.log.info('Test plan is starting.')
-                self.send_ok()
-            elif command.startswith(self.NOTIFICATION_TESTCASE_START):
-                self.log.info('Test case is starting.')
-                self.send_ok()
-            elif command.startswith(self.NOTIFICATION_TESCASE_END):
-                self.log.info('Test case finished.')
-                self.send_ok()
-            elif command.startswith(self.NOTIFICATION_TESTPLAN_END):
-                self.log.info('Test plan finished.')
-                self.send_ok()
-            else:
-                self.log.error('Unhandled automation command: ' + command)
-                raise ValueError()
-
-        def send_ok(self):
-            """ Sends an OK message to the Automation server. """
-            self.log.info("Sending OK response to Contest's Automation client")
-            self.transport.write(
-                bytearray(
-                    self.AUTOMATION_OK + '\n',
-                    encoding='utf-8',
-                    ))
-
-        def eof_received(self):
-            """ Called when the other end signals it won’t send any more
-            data.
-            """
-            self.log.info('Received EOF from Contest Automation client.')
diff --git a/src/antlion/controllers/rohdeschwarz_lib/smbv100.py b/src/antlion/controllers/rohdeschwarz_lib/smbv100.py
deleted file mode 100644
index 10ec98c..0000000
--- a/src/antlion/controllers/rohdeschwarz_lib/smbv100.py
+++ /dev/null
@@ -1,163 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Python module for Rohde & Schwarz SMBV100 Vector Signal Generator."""
-
-import numbers
-from antlion.controllers import abstract_inst
-
-
-class SMBV100Error(abstract_inst.SocketInstrumentError):
-    """SMBV100 Instrument Error Class."""
-
-
-class SMBV100(abstract_inst.SocketInstrument):
-    """SMBV100 Class, inherted from abstract_inst SocketInstrument."""
-
-    def __init__(self, ip_addr, ip_port):
-        """Init method for SMBV100.
-
-        Args:
-            ip_addr: IP Address.
-                Type, str.
-            ip_port: TCPIP Port.
-                Type, str.
-        """
-        super(SMBV100, self).__init__(ip_addr, ip_port)
-
-        self.idn = ''
-
-    def connect(self):
-        """Init and Connect to SMBV100."""
-        self._connect_socket()
-
-        self.get_idn()
-
-        infmsg = 'Connected to SMBV100, with ID: {}'.format(self.idn)
-        self._logger.debug(infmsg)
-
-    def close(self):
-        """Close SMBV100."""
-        self._close_socket()
-
-        self._logger.debug('Closed connection to SMBV100')
-
-    def get_idn(self):
-        """Get the Idenification of SMBV100.
-
-        Returns:
-            SMBV100 Identifier
-        """
-        self.idn = self._query('*IDN?')
-
-        return self.idn
-
-    def preset(self):
-        """Preset SMBV100 to default status."""
-        self._send('*RST')
-
-        self._logger.debug('Preset SMBV100')
-
-    def set_rfout_state(self, state):
-        """set SMBV100 RF output state.
-
-        Args:
-            state: RF output state.
-                Type, str. Option, ON/OFF.
-
-        Raises:
-            SMBV100Error: raise when state is not ON/OFF.
-        """
-
-        if state not in ['ON', 'OFF']:
-            raise SMBV100Error(error='"state" input must be "ON" or "OFF"',
-                               command='set_rfout')
-
-        self._send(':OUTP ' + state)
-
-        infmsg = 'set SMBV100 RF output to "{}"'.format(state)
-        self._logger.debug(infmsg)
-
-    def set_rfout_freq(self, freq):
-        """set SMBV100 RF output frequency.
-
-        Args:
-            freq: RF output frequency.
-                Type, num.
-
-        Raises:
-            SMBV100Error: raise when 'freq' is not numerical value.
-        """
-
-        if not isinstance(freq, numbers.Number):
-            raise SMBV100Error(error='"freq" input must be numerical value',
-                               command='set_rfoutfreq')
-
-        self._send(':SOUR:FREQ:CW ' + str(freq))
-
-        infmsg = 'set SMBV100 RF output frequency to {} Hz'.format(freq)
-        self._logger.debug(infmsg)
-
-    def get_rfout_freq(self):
-        """get SMBV100 RF output frequency.
-
-        Return:
-            freq: RF output frequency.
-                Type, num.
-        """
-        resp = self._query(':SOUR:FREQ:CW?')
-
-        freq = float(resp.split(';')[0])
-
-        infmsg = 'get SMBV100 RF output frequency as {} Hz'.format(freq)
-        self._logger.debug(infmsg)
-
-        return freq
-
-    def set_rfout_level(self, level):
-        """set SMBV100 RF output level.
-
-        Args:
-            level: RF Level.
-                Type, num.
-
-        Raises:
-            SMBV100Error: raise when 'level' is not numerical value.
-        """
-
-        if not isinstance(level, numbers.Number):
-            raise SMBV100Error(error='"level" input must be numerical value',
-                               command='set_rflevel')
-
-        self._send(':SOUR:POW:LEV:IMM:AMPL ' + str(level))
-
-        infmsg = 'set SMBV100 RF level to {} dBm'.format(level)
-        self._logger.debug(infmsg)
-
-    def get_rfout_level(self):
-        """get SMBV100 RF out level.
-
-        Return:
-            level: RF Level.
-                Type, num.
-        """
-        resp = self._query(':SOUR:POW:LEV:IMM:AMPL?')
-
-        level = float(resp.split(';')[0])
-
-        infmsg = 'get SMBV100 RF level as {} dBm'.format(level)
-        self._logger.debug(infmsg)
-
-        return level
diff --git a/src/antlion/controllers/sl4a_lib/error_reporter.py b/src/antlion/controllers/sl4a_lib/error_reporter.py
index c36593b..e560567 100644
--- a/src/antlion/controllers/sl4a_lib/error_reporter.py
+++ b/src/antlion/controllers/sl4a_lib/error_reporter.py
@@ -31,7 +31,7 @@
 
     def process(self, msg, kwargs):
         """Transforms a log message to be in a given format."""
-        return '[Error Report|%s] %s' % (self.label, msg), kwargs
+        return "[Error Report|%s] %s" % (self.label, msg), kwargs
 
 
 class ErrorReporter(object):
@@ -74,13 +74,15 @@
             if not ticket:
                 return False
 
-            report = ErrorLogger('%s|%s' % (self.name, ticket))
-            report.info('Creating error report.')
+            report = ErrorLogger("%s|%s" % (self.name, ticket))
+            report.info("Creating error report.")
 
-            (self.report_on_adb(sl4a_manager.adb, report)
-             and self.report_device_processes(sl4a_manager.adb, report) and
-             self.report_sl4a_state(rpc_connection, sl4a_manager.adb, report)
-             and self.report_sl4a_session(sl4a_manager, sl4a_session, report))
+            (
+                self.report_on_adb(sl4a_manager.adb, report)
+                and self.report_device_processes(sl4a_manager.adb, report)
+                and self.report_sl4a_state(rpc_connection, sl4a_manager.adb, report)
+                and self.report_sl4a_session(sl4a_manager, sl4a_session, report)
+            )
 
             return True
         finally:
@@ -90,27 +92,31 @@
         """Creates an error report for ADB. Returns false if ADB has failed."""
         adb_uptime = utils.get_command_uptime('"adb .* server"')
         if adb_uptime:
-            report.info('The adb daemon has an uptime of %s '
-                        '([[dd-]hh:]mm:ss).' % adb_uptime)
+            report.info(
+                "The adb daemon has an uptime of %s " "([[dd-]hh:]mm:ss)." % adb_uptime
+            )
         else:
-            report.warning('The adb daemon (on the host machine) is not '
-                           'running. All forwarded ports have been removed.')
+            report.warning(
+                "The adb daemon (on the host machine) is not "
+                "running. All forwarded ports have been removed."
+            )
             return False
 
         devices_output = adb.devices()
         if adb.serial not in devices_output:
             report.warning(
-                'This device cannot be found by ADB. The device may have shut '
-                'down or disconnected.')
+                "This device cannot be found by ADB. The device may have shut "
+                "down or disconnected."
+            )
             return False
-        elif re.findall(r'%s\s+offline' % adb.serial, devices_output):
+        elif re.findall(r"%s\s+offline" % adb.serial, devices_output):
             report.warning(
-                'The device is marked as offline in ADB. We are no longer able '
-                'to access the device.')
+                "The device is marked as offline in ADB. We are no longer able "
+                "to access the device."
+            )
             return False
         else:
-            report.info(
-                'The device is online and accessible through ADB calls.')
+            report.info("The device is online and accessible through ADB calls.")
         return True
 
     def report_device_processes(self, adb, report):
@@ -119,89 +125,102 @@
         Returns:
             False iff user-apks cannot be communicated with over tcp.
         """
-        zygote_uptime = utils.get_device_process_uptime(adb, 'zygote')
+        zygote_uptime = utils.get_device_process_uptime(adb, "zygote")
         if zygote_uptime:
             report.info(
-                'Zygote has been running for %s ([[dd-]hh:]mm:ss). If this '
-                'value is low, the phone may have recently crashed.' %
-                zygote_uptime)
+                "Zygote has been running for %s ([[dd-]hh:]mm:ss). If this "
+                "value is low, the phone may have recently crashed." % zygote_uptime
+            )
         else:
             report.warning(
-                'Zygote has been killed. It is likely the Android Runtime has '
-                'crashed. Check the bugreport/logcat for more information.')
+                "Zygote has been killed. It is likely the Android Runtime has "
+                "crashed. Check the bugreport/logcat for more information."
+            )
             return False
 
-        netd_uptime = utils.get_device_process_uptime(adb, 'netd')
+        netd_uptime = utils.get_device_process_uptime(adb, "netd")
         if netd_uptime:
             report.info(
-                'Netd has been running for %s ([[dd-]hh:]mm:ss). If this '
-                'value is low, the phone may have recently crashed.' %
-                zygote_uptime)
+                "Netd has been running for %s ([[dd-]hh:]mm:ss). If this "
+                "value is low, the phone may have recently crashed." % zygote_uptime
+            )
         else:
             report.warning(
-                'Netd has been killed. The Android Runtime may have crashed. '
-                'Check the bugreport/logcat for more information.')
+                "Netd has been killed. The Android Runtime may have crashed. "
+                "Check the bugreport/logcat for more information."
+            )
             return False
 
-        adbd_uptime = utils.get_device_process_uptime(adb, 'adbd')
+        adbd_uptime = utils.get_device_process_uptime(adb, "adbd")
         if netd_uptime:
             report.info(
-                'Adbd has been running for %s ([[dd-]hh:]mm:ss). If this '
-                'value is low, the phone may have recently crashed.' %
-                adbd_uptime)
+                "Adbd has been running for %s ([[dd-]hh:]mm:ss). If this "
+                "value is low, the phone may have recently crashed." % adbd_uptime
+            )
         else:
-            report.warning('Adbd is not running.')
+            report.warning("Adbd is not running.")
             return False
         return True
 
     def report_sl4a_state(self, rpc_connection, adb, report):
         """Creates an error report for the state of SL4A."""
-        report.info(
-            'Diagnosing Failure over connection %s.' % rpc_connection.ports)
+        report.info("Diagnosing Failure over connection %s." % rpc_connection.ports)
 
         ports = rpc_connection.ports
-        forwarded_ports_output = adb.forward('--list')
+        forwarded_ports_output = adb.forward("--list")
 
-        expected_output = '%s tcp:%s tcp:%s' % (
-            adb.serial, ports.forwarded_port, ports.server_port)
+        expected_output = "%s tcp:%s tcp:%s" % (
+            adb.serial,
+            ports.forwarded_port,
+            ports.server_port,
+        )
         if expected_output not in forwarded_ports_output:
             formatted_output = re.sub(
-                '^', '    ', forwarded_ports_output, flags=re.MULTILINE)
+                "^", "    ", forwarded_ports_output, flags=re.MULTILINE
+            )
             report.warning(
-                'The forwarded port for the failed RpcConnection is missing.\n'
-                'Expected:\n    %s\nBut found:\n%s' % (expected_output,
-                                                       formatted_output))
+                "The forwarded port for the failed RpcConnection is missing.\n"
+                "Expected:\n    %s\nBut found:\n%s"
+                % (expected_output, formatted_output)
+            )
             return False
         else:
-            report.info('The connection port has been properly forwarded to '
-                        'the device.')
+            report.info(
+                "The connection port has been properly forwarded to " "the device."
+            )
 
         sl4a_uptime = utils.get_device_process_uptime(
-            adb, 'com.googlecode.android_scripting')
+            adb, "com.googlecode.android_scripting"
+        )
         if sl4a_uptime:
             report.info(
-                'SL4A has been running for %s ([[dd-]hh:]mm:ss). If this '
-                'value is lower than the test case, it must have been '
-                'restarted during the test.' % sl4a_uptime)
+                "SL4A has been running for %s ([[dd-]hh:]mm:ss). If this "
+                "value is lower than the test case, it must have been "
+                "restarted during the test." % sl4a_uptime
+            )
         else:
             report.warning(
-                'The SL4A scripting service is not running. SL4A may have '
-                'crashed, or have been terminated by the Android Runtime.')
+                "The SL4A scripting service is not running. SL4A may have "
+                "crashed, or have been terminated by the Android Runtime."
+            )
             return False
         return True
 
     def report_sl4a_session(self, sl4a_manager, session, report):
         """Reports the state of an SL4A session."""
         if session.server_port not in sl4a_manager.sl4a_ports_in_use:
-            report.warning('SL4A server port %s not found in set of open '
-                           'ports %s' % (session.server_port,
-                                         sl4a_manager.sl4a_ports_in_use))
+            report.warning(
+                "SL4A server port %s not found in set of open "
+                "ports %s" % (session.server_port, sl4a_manager.sl4a_ports_in_use)
+            )
             return False
 
         if session not in sl4a_manager.sessions.values():
-            report.warning('SL4A session %s over port %s is not managed by '
-                           'the SL4A Manager. This session is already dead.' %
-                           (session.uid, session.server_port))
+            report.warning(
+                "SL4A session %s over port %s is not managed by "
+                "the SL4A Manager. This session is already dead."
+                % (session.uid, session.server_port)
+            )
             return False
         return True
 
@@ -209,11 +228,11 @@
         self._accept_requests = False
         while self._current_request_count > 0:
             # Wait for other threads to finish.
-            time.sleep(.1)
+            time.sleep(0.1)
 
     def _get_report_ticket(self):
         """Returns the next ticket, or none if all tickets have been used."""
-        logging.debug('Getting ticket for SL4A error report.')
+        logging.debug("Getting ticket for SL4A error report.")
         with self._ticket_lock:
             self._ticket_number += 1
             ticket_number = self._ticket_number
diff --git a/src/antlion/controllers/sl4a_lib/event_dispatcher.py b/src/antlion/controllers/sl4a_lib/event_dispatcher.py
index 4cdce8f..503923c 100644
--- a/src/antlion/controllers/sl4a_lib/event_dispatcher.py
+++ b/src/antlion/controllers/sl4a_lib/event_dispatcher.py
@@ -65,8 +65,11 @@
 
         def _log_formatter(message):
             """Defines the formatting used in the logger."""
-            return '[E Dispatcher|%s|%s] %s' % (self._serial,
-                                                self._rpc_client.uid, message)
+            return "[E Dispatcher|%s|%s] %s" % (
+                self._serial,
+                self._rpc_client.uid,
+                message,
+            )
 
         self.log = logger.create_logger(_log_formatter)
 
@@ -84,32 +87,33 @@
                 event_obj = self._rpc_client.eventWait(60000, timeout=120)
             except rpc_client.Sl4aConnectionError as e:
                 if self._rpc_client.is_alive:
-                    self.log.warning('Closing due to closed session.')
+                    self.log.warning("Closing due to closed session.")
                     break
                 else:
-                    self.log.warning('Closing due to error: %s.' % e)
+                    self.log.warning("Closing due to error: %s." % e)
                     self.close()
                     raise e
             if not event_obj:
                 continue
-            elif 'name' not in event_obj:
-                self.log.error('Received Malformed event {}'.format(event_obj))
+            elif "name" not in event_obj:
+                self.log.error("Received Malformed event {}".format(event_obj))
                 continue
             else:
-                event_name = event_obj['name']
+                event_name = event_obj["name"]
             # if handler registered, process event
-            if event_name == 'EventDispatcherShutdown':
-                self.log.debug('Received shutdown signal.')
+            if event_name == "EventDispatcherShutdown":
+                self.log.debug("Received shutdown signal.")
                 # closeSl4aSession has been called, which closes the event
                 # dispatcher. Stop execution on this polling thread.
                 return
             if event_name in self._handlers:
                 self.log.debug(
-                    'Using handler %s for event: %r' %
-                    (self._handlers[event_name].__name__, event_obj))
+                    "Using handler %s for event: %r"
+                    % (self._handlers[event_name].__name__, event_obj)
+                )
                 self.handle_subscribed_event(event_obj, event_name)
             else:
-                self.log.debug('Queuing event: %r' % event_obj)
+                self.log.debug("Queuing event: %r" % event_obj)
                 self._lock.acquire()
                 if event_name in self._event_dict:  # otherwise, cache event
                     self._event_dict[event_name].put(event_obj)
@@ -136,13 +140,15 @@
                 handler for one type of event.
         """
         if self._started:
-            raise IllegalStateError('Cannot register service after polling is '
-                                    'started.')
+            raise IllegalStateError(
+                "Cannot register service after polling is " "started."
+            )
         self._lock.acquire()
         try:
             if event_name in self._handlers:
                 raise DuplicateError(
-                    'A handler for {} already exists'.format(event_name))
+                    "A handler for {} already exists".format(event_name)
+                )
             self._handlers[event_name] = (handler, args)
         finally:
             self._lock.release()
@@ -195,14 +201,14 @@
                 starts polling.
         """
         if not self._started:
-            raise IllegalStateError(
-                'Dispatcher needs to be started before popping.')
+            raise IllegalStateError("Dispatcher needs to be started before popping.")
 
         e_queue = self.get_event_q(event_name)
 
         if not e_queue:
             raise IllegalStateError(
-                'Failed to get an event queue for {}'.format(event_name))
+                "Failed to get an event queue for {}".format(event_name)
+            )
 
         try:
             # Block for timeout
@@ -215,17 +221,13 @@
                 # Block forever on event wait
                 return e_queue.get(True)
         except queue.Empty:
-            msg = 'Timeout after {}s waiting for event: {}'.format(
-                timeout, event_name)
+            msg = "Timeout after {}s waiting for event: {}".format(timeout, event_name)
             self.log.info(msg)
             raise queue.Empty(msg)
 
-    def wait_for_event(self,
-                       event_name,
-                       predicate,
-                       timeout=DEFAULT_TIMEOUT,
-                       *args,
-                       **kwargs):
+    def wait_for_event(
+        self, event_name, predicate, timeout=DEFAULT_TIMEOUT, *args, **kwargs
+    ):
         """Wait for an event that satisfies a predicate to appear.
 
         Continuously pop events of a particular name and check against the
@@ -252,15 +254,15 @@
         """
         deadline = time.time() + timeout
         ignored_events = []
-        consume_events = kwargs.pop('consume_ignored_events', True)
+        consume_events = kwargs.pop("consume_ignored_events", True)
         while True:
             event = None
             try:
                 event = self.pop_event(event_name, 1)
                 if consume_events:
-                    self.log.debug('Consuming event: %r' % event)
+                    self.log.debug("Consuming event: %r" % event)
                 else:
-                    self.log.debug('Peeking at event: %r' % event)
+                    self.log.debug("Peeking at event: %r" % event)
                     ignored_events.append(event)
             except queue.Empty:
                 pass
@@ -268,15 +270,17 @@
             if event and predicate(event, *args, **kwargs):
                 for ignored_event in ignored_events:
                     self.get_event_q(event_name).put(ignored_event)
-                self.log.debug('Matched event: %r with %s' %
-                               (event, predicate.__name__))
+                self.log.debug(
+                    "Matched event: %r with %s" % (event, predicate.__name__)
+                )
                 return event
 
             if time.time() > deadline:
                 for ignored_event in ignored_events:
                     self.get_event_q(event_name).put(ignored_event)
-                msg = 'Timeout after {}s waiting for event: {}'.format(
-                    timeout, event_name)
+                msg = "Timeout after {}s waiting for event: {}".format(
+                    timeout, event_name
+                )
                 self.log.info(msg)
                 raise queue.Empty(msg)
 
@@ -305,8 +309,7 @@
             queue.Empty: Raised if no event was found before time out.
         """
         if not self._started:
-            raise IllegalStateError(
-                "Dispatcher needs to be started before popping.")
+            raise IllegalStateError("Dispatcher needs to be started before popping.")
         deadline = time.time() + timeout
         while True:
             # TODO: fix the sleep loop
@@ -315,12 +318,13 @@
                 break
             time.sleep(freq)
         if len(results) == 0:
-            msg = 'Timeout after {}s waiting for event: {}'.format(
-                timeout, regex_pattern)
+            msg = "Timeout after {}s waiting for event: {}".format(
+                timeout, regex_pattern
+            )
             self.log.error(msg)
             raise queue.Empty(msg)
 
-        return sorted(results, key=lambda event: event['time'])
+        return sorted(results, key=lambda event: event["time"])
 
     def _match_and_pop(self, regex_pattern):
         """Pop one event from each of the event queues whose names
@@ -347,8 +351,7 @@
         Returns: A queue storing all the events of the specified name.
         """
         self._lock.acquire()
-        if (event_name not in self._event_dict
-                or self._event_dict[event_name] is None):
+        if event_name not in self._event_dict or self._event_dict[event_name] is None:
             self._event_dict[event_name] = queue.Queue()
         self._lock.release()
 
@@ -368,8 +371,9 @@
         handler, args = self._handlers[event_name]
         self._executor.submit(handler, event_obj, *args)
 
-    def _handle(self, event_handler, event_name, user_args, event_timeout,
-                cond, cond_timeout):
+    def _handle(
+        self, event_handler, event_name, user_args, event_timeout, cond, cond_timeout
+    ):
         """Pop an event of specified type and calls its handler on it. If
         condition is not None, block until condition is met or timeout.
         """
@@ -378,13 +382,15 @@
         event = self.pop_event(event_name, event_timeout)
         return event_handler(event, *user_args)
 
-    def handle_event(self,
-                     event_handler,
-                     event_name,
-                     user_args,
-                     event_timeout=None,
-                     cond=None,
-                     cond_timeout=None):
+    def handle_event(
+        self,
+        event_handler,
+        event_name,
+        user_args,
+        event_timeout=None,
+        cond=None,
+        cond_timeout=None,
+    ):
         """Handle events that don't have registered handlers
 
         In a new thread, poll one event of specified type from its queue and
@@ -408,9 +414,15 @@
                 If blocking call worker.result() is triggered, the handler
                 needs to return something to unblock.
         """
-        worker = self._executor.submit(self._handle, event_handler, event_name,
-                                       user_args, event_timeout, cond,
-                                       cond_timeout)
+        worker = self._executor.submit(
+            self._handle,
+            event_handler,
+            event_name,
+            user_args,
+            event_timeout,
+            cond,
+            cond_timeout,
+        )
         return worker
 
     def pop_all(self, event_name):
@@ -430,8 +442,9 @@
                 starts polling.
         """
         if not self._started:
-            raise IllegalStateError(("Dispatcher needs to be started before "
-                                     "popping."))
+            raise IllegalStateError(
+                ("Dispatcher needs to be started before " "popping.")
+            )
         results = []
         try:
             self._lock.acquire()
@@ -469,7 +482,7 @@
 
     def is_event_match_for_list(self, event, field, value_list):
         try:
-            value_in_event = event['data'][field]
+            value_in_event = event["data"][field]
         except KeyError:
             return False
         for value in value_list:
diff --git a/src/antlion/controllers/sl4a_lib/rpc_client.py b/src/antlion/controllers/sl4a_lib/rpc_client.py
index fd45c04..cc2cee2 100644
--- a/src/antlion/controllers/sl4a_lib/rpc_client.py
+++ b/src/antlion/controllers/sl4a_lib/rpc_client.py
@@ -28,6 +28,7 @@
 # The Session UID when a UID has not been received yet.
 UNKNOWN_UID = -1
 
+
 class Sl4aException(error.ActsError):
     """The base class for all SL4A exceptions."""
 
@@ -48,7 +49,7 @@
         data: The extra data, if any, returned by SL4A.
     """
 
-    def __init__(self, message, code=-1, data=None, rpc_name=''):
+    def __init__(self, message, code=-1, data=None, rpc_name=""):
         super().__init__()
         self.message = message
         self.code = code
@@ -60,11 +61,14 @@
 
     def __str__(self):
         if self.data:
-            return 'Error in RPC %s %s:%s:%s' % (self.rpc_name, self.code,
-                                                 self.message, self.data)
+            return "Error in RPC %s %s:%s:%s" % (
+                self.rpc_name,
+                self.code,
+                self.message,
+                self.data,
+            )
         else:
-            return 'Error in RPC %s %s:%s' % (self.rpc_name, self.code,
-                                              self.message)
+            return "Error in RPC %s %s:%s" % (self.rpc_name, self.code, self.message)
 
 
 class Sl4aConnectionError(Sl4aException):
@@ -73,9 +77,10 @@
 
 class Sl4aProtocolError(Sl4aException):
     """Raised when there an error in exchanging data with server on device."""
-    NO_RESPONSE_FROM_HANDSHAKE = 'No response from handshake.'
-    NO_RESPONSE_FROM_SERVER = 'No response from server.'
-    MISMATCHED_API_ID = 'Mismatched API id.'
+
+    NO_RESPONSE_FROM_HANDSHAKE = "No response from handshake."
+    NO_RESPONSE_FROM_SERVER = "No response from server."
+    MISMATCHED_API_ID = "Mismatched API id."
 
 
 class Sl4aNotInstalledError(Sl4aException):
@@ -98,6 +103,7 @@
             modify the thread pool size being used for self.future RPC calls.
         _log: The logger for this RpcClient.
     """
+
     """The default value for the maximum amount of connections for a client."""
     DEFAULT_MAX_CONNECTION = 15
 
@@ -112,7 +118,8 @@
         def __init__(self, rpc_client):
             self._rpc_client = rpc_client
             self._executor = futures.ThreadPoolExecutor(
-                max_workers=max(rpc_client.max_connections - 2, 1))
+                max_workers=max(rpc_client.max_connections - 2, 1)
+            )
 
         def rpc(self, name, *args, **kwargs):
             future = self._executor.submit(name, *args, **kwargs)
@@ -123,17 +130,20 @@
 
             def rpc_call(*args, **kwargs):
                 future = self._executor.submit(
-                    self._rpc_client.__getattr__(name), *args, **kwargs)
+                    self._rpc_client.__getattr__(name), *args, **kwargs
+                )
                 return future
 
             return rpc_call
 
-    def __init__(self,
-                 uid,
-                 serial,
-                 on_error_callback,
-                 _create_connection_func,
-                 max_connections=None):
+    def __init__(
+        self,
+        uid,
+        serial,
+        on_error_callback,
+        _create_connection_func,
+        max_connections=None,
+    ):
         """Creates a new RpcClient object.
 
         Args:
@@ -155,7 +165,7 @@
 
         def _log_formatter(message):
             """Formats the message to be logged."""
-            return '[RPC Service|%s|%s] %s' % (self._serial, self.uid, message)
+            return "[RPC Service|%s|%s] %s" % (self._serial, self.uid, message)
 
         self._log = logger.create_logger(_log_formatter)
 
@@ -172,13 +182,13 @@
         """Terminates all connections to the SL4A server."""
         if len(self._working_connections) > 0:
             self._log.warning(
-                '%s connections are still active, and waiting on '
-                'responses.Closing these connections now.' % len(
-                    self._working_connections))
+                "%s connections are still active, and waiting on "
+                "responses.Closing these connections now."
+                % len(self._working_connections)
+            )
         connections = self._free_connections + self._working_connections
         for connection in connections:
-            self._log.debug(
-                'Closing connection over ports %s' % connection.ports)
+            self._log.debug("Closing connection over ports %s" % connection.ports)
             connection.close()
         self._free_connections = []
         self._working_connections = []
@@ -201,17 +211,17 @@
                     self._working_connections.append(client)
                     return client
 
-            client_count = (len(self._free_connections) +
-                            len(self._working_connections))
+            client_count = len(self._free_connections) + len(self._working_connections)
             if client_count < self.max_connections:
                 with self._lock:
-                    client_count = (len(self._free_connections) +
-                                    len(self._working_connections))
+                    client_count = len(self._free_connections) + len(
+                        self._working_connections
+                    )
                     if client_count < self.max_connections:
                         client = self._create_connection_func(self.uid)
                         self._working_connections.append(client)
                         return client
-            time.sleep(.01)
+            time.sleep(0.01)
 
     def _release_working_connection(self, connection):
         """Marks a working client as free.
@@ -250,9 +260,9 @@
         timed_out = False
         if timeout:
             connection.set_timeout(timeout)
-        data = {'id': ticket, 'method': method, 'params': args}
+        data = {"id": ticket, "method": method, "params": args}
         request = json.dumps(data)
-        response = ''
+        response = ""
         try:
             for i in range(1, retries + 1):
                 connection.send_request(request)
@@ -261,36 +271,43 @@
                 if not response:
                     if i < retries:
                         self._log.warning(
-                            'No response for RPC method %s on iteration %s',
-                            method, i)
+                            "No response for RPC method %s on iteration %s", method, i
+                        )
                         continue
                     else:
                         self._log.exception(
-                            'No response for RPC method %s on iteration %s',
-                            method, i)
+                            "No response for RPC method %s on iteration %s", method, i
+                        )
                         self.on_error(connection)
                         raise Sl4aProtocolError(
-                            Sl4aProtocolError.NO_RESPONSE_FROM_SERVER)
+                            Sl4aProtocolError.NO_RESPONSE_FROM_SERVER
+                        )
                 else:
                     break
         except BrokenPipeError as e:
             if self.is_alive:
-                self._log.exception('The device disconnected during RPC call '
-                                    '%s. Please check the logcat for a crash '
-                                    'or disconnect.', method)
+                self._log.exception(
+                    "The device disconnected during RPC call "
+                    "%s. Please check the logcat for a crash "
+                    "or disconnect.",
+                    method,
+                )
                 self.on_error(connection)
             else:
-                self._log.warning('The connection was killed during cleanup:')
+                self._log.warning("The connection was killed during cleanup:")
                 self._log.warning(e)
             raise Sl4aConnectionError(e)
         except socket.timeout as err:
             # If a socket connection has timed out, the socket can no longer be
             # used. Close it out and remove the socket from the connection pool.
             timed_out = True
-            self._log.warning('RPC "%s" (id: %s) timed out after %s seconds.',
-                              method, ticket, timeout or SOCKET_TIMEOUT)
-            self._log.debug(
-                'Closing timed out connection over %s' % connection.ports)
+            self._log.warning(
+                'RPC "%s" (id: %s) timed out after %s seconds.',
+                method,
+                ticket,
+                timeout or SOCKET_TIMEOUT,
+            )
+            self._log.debug("Closing timed out connection over %s" % connection.ports)
             connection.close()
             self._working_connections.remove(connection)
             # Re-raise the error as an SL4A Error so end users can process it.
@@ -300,26 +317,29 @@
                 if timeout:
                     connection.set_timeout(SOCKET_TIMEOUT)
                 self._release_working_connection(connection)
-        result = json.loads(str(response, encoding='utf8'))
+        result = json.loads(str(response, encoding="utf8"))
 
-        if result['error']:
-            error_object = result['error']
+        if result["error"]:
+            error_object = result["error"]
             if isinstance(error_object, dict):
                 # Uses JSON-RPC 2.0 Format
-                sl4a_api_error = Sl4aApiError(error_object.get('message', None),
-                                              error_object.get('code', -1),
-                                              error_object.get('data', {}),
-                                              rpc_name=method)
+                sl4a_api_error = Sl4aApiError(
+                    error_object.get("message", None),
+                    error_object.get("code", -1),
+                    error_object.get("data", {}),
+                    rpc_name=method,
+                )
             else:
                 # Fallback on JSON-RPC 1.0 Format
                 sl4a_api_error = Sl4aApiError(error_object, rpc_name=method)
             self._log.warning(sl4a_api_error)
             raise sl4a_api_error
-        if result['id'] != ticket:
-            self._log.error('RPC method %s with mismatched api id %s', method,
-                            result['id'])
+        if result["id"] != ticket:
+            self._log.error(
+                "RPC method %s with mismatched api id %s", method, result["id"]
+            )
             raise Sl4aProtocolError(Sl4aProtocolError.MISMATCHED_API_ID)
-        return result['result']
+        return result["result"]
 
     @property
     def future(self):
@@ -353,6 +373,7 @@
 
         if not self.is_alive:
             raise Sl4aStartError(
-                'This SL4A session has already been terminated. You must '
-                'create a new session to continue.')
+                "This SL4A session has already been terminated. You must "
+                "create a new session to continue."
+            )
         return rpc_call
diff --git a/src/antlion/controllers/sl4a_lib/rpc_connection.py b/src/antlion/controllers/sl4a_lib/rpc_connection.py
index b8cb8cf..6b9bf25 100644
--- a/src/antlion/controllers/sl4a_lib/rpc_connection.py
+++ b/src/antlion/controllers/sl4a_lib/rpc_connection.py
@@ -30,8 +30,9 @@
     INIT: Initializes a new sessions in sl4a.
     CONTINUE: Creates a connection.
     """
-    INIT = 'initiate'
-    CONTINUE = 'continue'
+
+    INIT = "initiate"
+    CONTINUE = "continue"
 
 
 class RpcConnection(object):
@@ -58,9 +59,12 @@
 
         def _log_formatter(message):
             """Defines the formatting used in the logger."""
-            return '[SL4A Client|%s|%s|%s] %s' % (self.adb.serial,
-                                                  self.ports.client_port,
-                                                  self.uid, message)
+            return "[SL4A Client|%s|%s|%s] %s" % (
+                self.adb.serial,
+                self.ports.client_port,
+                self.uid,
+                message,
+            )
 
         self.log = logger.create_logger(_log_formatter)
 
@@ -84,19 +88,19 @@
         try:
             resp = self._cmd(start_command)
         except socket.timeout as e:
-            self.log.error('Failed to open socket connection: %s', e)
+            self.log.error("Failed to open socket connection: %s", e)
             raise
         if not resp:
             raise rpc_client.Sl4aProtocolError(
-                rpc_client.Sl4aProtocolError.NO_RESPONSE_FROM_HANDSHAKE)
-        result = json.loads(str(resp, encoding='utf8'))
-        if result['status']:
-            self.uid = result['uid']
+                rpc_client.Sl4aProtocolError.NO_RESPONSE_FROM_HANDSHAKE
+            )
+        result = json.loads(str(resp, encoding="utf8"))
+        if result["status"]:
+            self.uid = result["uid"]
         else:
-            self.log.warning(
-                'UID not received for connection %s.' % self.ports)
+            self.log.warning("UID not received for connection %s." % self.ports)
             self.uid = UNKNOWN_UID
-        self.log.debug('Created connection over: %s.' % self.ports)
+        self.log.debug("Created connection over: %s." % self.ports)
 
     def _cmd(self, command):
         """Sends an session protocol command to SL4A to establish communication.
@@ -107,7 +111,7 @@
         Returns:
             The line that was written back.
         """
-        self.send_request(json.dumps({'cmd': command, 'uid': self.uid}))
+        self.send_request(json.dumps({"cmd": command, "uid": self.uid}))
         return self.get_response()
 
     def get_new_ticket(self):
@@ -123,14 +127,14 @@
 
     def send_request(self, request):
         """Sends a request over the connection."""
-        self._socket_file.write(request.encode('utf8') + b'\n')
+        self._socket_file.write(request.encode("utf8") + b"\n")
         self._socket_file.flush()
-        self.log.debug('Sent: ' + request)
+        self.log.debug("Sent: " + request)
 
     def get_response(self):
         """Returns the first response sent back to the client."""
         data = self._socket_file.readline()
-        self.log.debug('Received: ' + data.decode('utf8', errors='replace'))
+        self.log.debug("Received: " + data.decode("utf8", errors="replace"))
         return data
 
     def close(self):
diff --git a/src/antlion/controllers/sl4a_lib/sl4a_manager.py b/src/antlion/controllers/sl4a_lib/sl4a_manager.py
index 8749702..f3d7047 100644
--- a/src/antlion/controllers/sl4a_lib/sl4a_manager.py
+++ b/src/antlion/controllers/sl4a_lib/sl4a_manager.py
@@ -22,25 +22,27 @@
 from antlion.controllers.sl4a_lib import sl4a_session
 from antlion.controllers.sl4a_lib import error_reporter
 
-ATTEMPT_INTERVAL = .25
+ATTEMPT_INTERVAL = 0.25
 MAX_WAIT_ON_SERVER_SECONDS = 5
 
-SL4A_PKG_NAME = 'com.googlecode.android_scripting'
+SL4A_PKG_NAME = "com.googlecode.android_scripting"
 
 _SL4A_LAUNCH_SERVER_CMD = (
-    'am startservice -a com.googlecode.android_scripting.action.LAUNCH_SERVER '
-    '--ei com.googlecode.android_scripting.extra.USE_SERVICE_PORT %s '
-    'com.googlecode.android_scripting/.service.ScriptingLayerService')
+    "am startservice -a com.googlecode.android_scripting.action.LAUNCH_SERVER "
+    "--ei com.googlecode.android_scripting.extra.USE_SERVICE_PORT %s "
+    "com.googlecode.android_scripting/.service.ScriptingLayerService"
+)
 
 _SL4A_CLOSE_SERVER_CMD = (
-    'am startservice -a com.googlecode.android_scripting.action.KILL_PROCESS '
-    '--ei com.googlecode.android_scripting.extra.PROXY_PORT %s '
-    'com.googlecode.android_scripting/.service.ScriptingLayerService')
+    "am startservice -a com.googlecode.android_scripting.action.KILL_PROCESS "
+    "--ei com.googlecode.android_scripting.extra.PROXY_PORT %s "
+    "com.googlecode.android_scripting/.service.ScriptingLayerService"
+)
 
 # The command for finding SL4A's server port as root.
 _SL4A_ROOT_FIND_PORT_CMD = (
     # Get all open, listening ports, and their process names
-    'ss -l -p -n | '
+    "ss -l -p -n | "
     # Find all open TCP ports for SL4A
     'grep "tcp.*droid_scripting" | '
     # Shorten all whitespace to a single space character
@@ -48,12 +50,13 @@
     # Grab the 5th column (which is server:port)
     'cut -d " " -f 5 |'
     # Only grab the port
-    'sed s/.*://g')
+    "sed s/.*://g"
+)
 
 # The command for finding SL4A's server port without root.
 _SL4A_USER_FIND_PORT_CMD = (
     # Get all open, listening ports, and their process names
-    'ss -l -p -n | '
+    "ss -l -p -n | "
     # Find all open ports exposed to the public. This can produce false
     # positives since users cannot read the process associated with the port.
     'grep -e "tcp.*::ffff:127\.0\.0\.1:" | '
@@ -62,12 +65,13 @@
     # Grab the 5th column (which is server:port)
     'cut -d " " -f 5 |'
     # Only grab the port
-    'sed s/.*://g')
+    "sed s/.*://g"
+)
 
 # The command that begins the SL4A ScriptingLayerService.
 _SL4A_START_SERVICE_CMD = (
-    'am startservice '
-    'com.googlecode.android_scripting/.service.ScriptingLayerService')
+    "am startservice " "com.googlecode.android_scripting/.service.ScriptingLayerService"
+)
 
 # Maps device serials to their SL4A Manager. This is done to prevent multiple
 # Sl4aManagers from existing for the same device.
@@ -82,8 +86,9 @@
     """
     if adb.serial in _all_sl4a_managers:
         _all_sl4a_managers[adb.serial].log.warning(
-            'Attempted to return multiple SL4AManagers on the same device. '
-            'Returning pre-existing SL4AManager instead.')
+            "Attempted to return multiple SL4AManagers on the same device. "
+            "Returning pre-existing SL4AManager instead."
+        )
         return _all_sl4a_managers[adb.serial]
     else:
         manager = Sl4aManager(adb)
@@ -111,12 +116,12 @@
         self._listen_for_port_lock = threading.Lock()
         self._sl4a_ports = set()
         self.adb = adb
-        self.log = logger.create_logger(lambda msg: '[SL4A Manager|%s] %s' % (
-            adb.serial, msg))
+        self.log = logger.create_logger(
+            lambda msg: "[SL4A Manager|%s] %s" % (adb.serial, msg)
+        )
         self.sessions = {}
         self._started = False
-        self.error_reporter = error_reporter.ErrorReporter('SL4A %s' %
-                                                           adb.serial)
+        self.error_reporter = error_reporter.ErrorReporter("SL4A %s" % adb.serial)
 
     @property
     def sl4a_ports_in_use(self):
@@ -162,9 +167,9 @@
                 return port
 
         raise rpc_client.Sl4aConnectionError(
-            'Unable to find a valid open port for a new server connection. '
-            'Expected port: %s. Open ports: %s' %
-            (device_port, self._sl4a_ports))
+            "Unable to find a valid open port for a new server connection. "
+            "Expected port: %s. Open ports: %s" % (device_port, self._sl4a_ports)
+        )
 
     def _get_all_ports_command(self):
         """Returns the list of all ports from the command to get ports."""
@@ -177,8 +182,10 @@
         else:
             # TODO(markdr): When root is unavailable, search logcat output for
             #               the port the server has opened.
-            self.log.warning('Device cannot be put into root mode. SL4A '
-                             'server connections cannot be verified.')
+            self.log.warning(
+                "Device cannot be put into root mode. SL4A "
+                "server connections cannot be verified."
+            )
             return _SL4A_USER_FIND_PORT_CMD
 
     def _get_all_ports(self):
@@ -190,7 +197,7 @@
         Will return none if no port is found.
         """
         possible_ports = self._get_all_ports()
-        self.log.debug('SL4A Ports found: %s' % possible_ports)
+        self.log.debug("SL4A Ports found: %s" % possible_ports)
 
         # Acquire the lock. We lock this method because if multiple threads
         # attempt to get a server at the same time, they can potentially find
@@ -204,8 +211,7 @@
 
     def is_sl4a_installed(self):
         """Returns True if SL4A is installed on the AndroidDevice."""
-        return bool(
-            self.adb.shell('pm path %s' % SL4A_PKG_NAME, ignore_status=True))
+        return bool(self.adb.shell("pm path %s" % SL4A_PKG_NAME, ignore_status=True))
 
     def start_sl4a_service(self):
         """Starts the SL4A Service on the device.
@@ -217,14 +223,14 @@
             self._started = True
             if not self.is_sl4a_installed():
                 raise rpc_client.Sl4aNotInstalledError(
-                    'SL4A is not installed on device %s' % self.adb.serial)
+                    "SL4A is not installed on device %s" % self.adb.serial
+                )
             if self.adb.shell('(ps | grep "S %s") || true' % SL4A_PKG_NAME):
                 # Close all SL4A servers not opened by this manager.
                 # TODO(markdr): revert back to closing all ports after
                 # b/76147680 is resolved.
-                self.adb.shell('kill -9 $(pidof %s)' % SL4A_PKG_NAME)
-            self.adb.shell(
-                'settings put global hidden_api_blacklist_exemptions "*"')
+                self.adb.shell("kill -9 $(pidof %s)" % SL4A_PKG_NAME)
+            self.adb.shell('settings put global hidden_api_blacklist_exemptions "*"')
             # Start the service if it is not up already.
             self.adb.shell(_SL4A_START_SERVICE_CMD)
 
@@ -239,11 +245,9 @@
         else:
             return server_port
 
-    def create_session(self,
-                       max_connections=None,
-                       client_port=0,
-                       forwarded_port=0,
-                       server_port=None):
+    def create_session(
+        self, max_connections=None, client_port=0, forwarded_port=0, server_port=None
+    ):
         """Creates an SL4A server with the given ports if possible.
 
         The ports are not guaranteed to be available for use. If the port
@@ -264,30 +268,32 @@
         if server_port is None:
             # If a session already exists, use the same server.
             if len(self.sessions) > 0:
-                server_port = self.sessions[sorted(
-                    self.sessions.keys())[0]].server_port
+                server_port = self.sessions[sorted(self.sessions.keys())[0]].server_port
             # Otherwise, open a new server on a random port.
             else:
                 server_port = 0
         self.log.debug(
-            "Creating SL4A session client_port={}, forwarded_port={}, server_port={}"
-            .format(client_port, forwarded_port, server_port))
+            "Creating SL4A session client_port={}, forwarded_port={}, server_port={}".format(
+                client_port, forwarded_port, server_port
+            )
+        )
         self.start_sl4a_service()
-        session = sl4a_session.Sl4aSession(self.adb,
-                                           client_port,
-                                           server_port,
-                                           self.obtain_sl4a_server,
-                                           self.diagnose_failure,
-                                           forwarded_port,
-                                           max_connections=max_connections)
+        session = sl4a_session.Sl4aSession(
+            self.adb,
+            client_port,
+            server_port,
+            self.obtain_sl4a_server,
+            self.diagnose_failure,
+            forwarded_port,
+            max_connections=max_connections,
+        )
         self.sessions[session.uid] = session
         return session
 
     def stop_service(self):
         """Stops The SL4A Service. Force-stops the SL4A apk."""
         try:
-            self.adb.shell('am force-stop %s' % SL4A_PKG_NAME,
-                           ignore_status=True)
+            self.adb.shell("am force-stop %s" % SL4A_PKG_NAME, ignore_status=True)
         except Exception as e:
             self.log.warning("Fail to stop package %s: %s", SL4A_PKG_NAME, e)
         self._started = False
@@ -312,6 +318,7 @@
 
         if time_left <= 0:
             self.log.warning(
-                'Unable to close all un-managed servers! Server ports that are '
-                'still open are %s' % self._get_open_listening_port())
+                "Unable to close all un-managed servers! Server ports that are "
+                "still open are %s" % self._get_open_listening_port()
+            )
         self._sl4a_ports = set()
diff --git a/src/antlion/controllers/sl4a_lib/sl4a_ports.py b/src/antlion/controllers/sl4a_lib/sl4a_ports.py
index d0172cc..db9917e 100644
--- a/src/antlion/controllers/sl4a_lib/sl4a_ports.py
+++ b/src/antlion/controllers/sl4a_lib/sl4a_ports.py
@@ -30,5 +30,8 @@
         self.server_port = server_port
 
     def __str__(self):
-        return '(%s, %s, %s)' % (self.client_port, self.forwarded_port,
-                                 self.server_port)
+        return "(%s, %s, %s)" % (
+            self.client_port,
+            self.forwarded_port,
+            self.server_port,
+        )
diff --git a/src/antlion/controllers/sl4a_lib/sl4a_session.py b/src/antlion/controllers/sl4a_lib/sl4a_session.py
index e84def2..27edc48 100644
--- a/src/antlion/controllers/sl4a_lib/sl4a_session.py
+++ b/src/antlion/controllers/sl4a_lib/sl4a_session.py
@@ -49,14 +49,16 @@
             value is only unique during the lifetime of the SL4A apk.
     """
 
-    def __init__(self,
-                 adb,
-                 host_port,
-                 device_port,
-                 get_server_port_func,
-                 on_error_callback,
-                 forwarded_port=0,
-                 max_connections=None):
+    def __init__(
+        self,
+        adb,
+        host_port,
+        device_port,
+        get_server_port_func,
+        on_error_callback,
+        forwarded_port=0,
+        max_connections=None,
+    ):
         """Creates an SL4A Session.
 
         Args:
@@ -77,8 +79,7 @@
         self.adb = adb
 
         def _log_formatter(message):
-            return '[SL4A Session|%s|%s] %s' % (self.adb.serial, self.uid,
-                                                message)
+            return "[SL4A Session|%s|%s] %s" % (self.adb.serial, self.uid, message)
 
         self.log = logger.create_logger(_log_formatter)
 
@@ -89,17 +90,22 @@
         self._on_error_callback = on_error_callback
 
         connection_creator = self._rpc_connection_creator(host_port)
-        self.rpc_client = rpc_client.RpcClient(self.uid,
-                                               self.adb.serial,
-                                               self.diagnose_failure,
-                                               connection_creator,
-                                               max_connections=max_connections)
+        self.rpc_client = rpc_client.RpcClient(
+            self.uid,
+            self.adb.serial,
+            self.diagnose_failure,
+            connection_creator,
+            max_connections=max_connections,
+        )
 
     def _rpc_connection_creator(self, host_port):
         def create_client(uid):
-            return self._create_rpc_connection(ports=sl4a_ports.Sl4aPorts(
-                host_port, self.forwarded_port, self.server_port),
-                                               uid=uid)
+            return self._create_rpc_connection(
+                ports=sl4a_ports.Sl4aPorts(
+                    host_port, self.forwarded_port, self.server_port
+                ),
+                uid=uid,
+            )
 
         return create_client
 
@@ -121,19 +127,21 @@
         """
         if self.adb.get_version_number() < 37 and hinted_port == 0:
             self.log.error(
-                'The current version of ADB does not automatically provide a '
-                'port to forward. Please upgrade ADB to version 1.0.37 or '
-                'higher.')
-            raise Sl4aStartError('Unable to forward a port to the device.')
+                "The current version of ADB does not automatically provide a "
+                "port to forward. Please upgrade ADB to version 1.0.37 or "
+                "higher."
+            )
+            raise Sl4aStartError("Unable to forward a port to the device.")
         else:
             try:
                 return self.adb.tcp_forward(hinted_port, server_port)
             except AdbError as e:
-                if 'cannot bind listener' in e.stderr:
+                if "cannot bind listener" in e.stderr:
                     self.log.warning(
-                        'Unable to use %s to forward to device port %s due to: '
-                        '"%s". Attempting to choose a random port instead.' %
-                        (hinted_port, server_port, e.stderr))
+                        "Unable to use %s to forward to device port %s due to: "
+                        '"%s". Attempting to choose a random port instead.'
+                        % (hinted_port, server_port, e.stderr)
+                    )
                     # Call this method again, but this time with no hinted port.
                     return self._create_forwarded_port(server_port)
                 raise e
@@ -160,13 +168,12 @@
         self.server_port = ports.server_port
         # Forward the device port to the host.
         ports.forwarded_port = self._create_forwarded_port(
-            ports.server_port, hinted_port=ports.forwarded_port)
+            ports.server_port, hinted_port=ports.forwarded_port
+        )
         client_socket, fd = self._create_client_side_connection(ports)
-        client = rpc_connection.RpcConnection(self.adb,
-                                              ports,
-                                              client_socket,
-                                              fd,
-                                              uid=uid)
+        client = rpc_connection.RpcConnection(
+            self.adb, ports, client_socket, fd, uid=uid
+        )
         client.open()
         if uid == UNKNOWN_UID:
             self.uid = client.uid
@@ -180,7 +187,8 @@
         """Returns the EventDispatcher for this Sl4aSession."""
         if self._event_dispatcher is None:
             self._event_dispatcher = event_dispatcher.EventDispatcher(
-                self.adb.serial, self.rpc_client)
+                self.adb.serial, self.rpc_client
+            )
         return self._event_dispatcher
 
     def _create_client_side_connection(self, ports):
@@ -202,9 +210,10 @@
             except OSError as e:
                 # If the port is in use, log and ask for any open port.
                 if e.errno == errno.EADDRINUSE:
-                    self.log.warning('Port %s is already in use on the host. '
-                                     'Generating a random port.' %
-                                     ports.client_port)
+                    self.log.warning(
+                        "Port %s is already in use on the host. "
+                        "Generating a random port." % ports.client_port
+                    )
                     ports.client_port = 0
                     return self._create_client_side_connection(ports)
                 raise
@@ -212,11 +221,12 @@
         # Verify and obtain the port opened by SL4A.
         try:
             # Connect to the port that has been forwarded to the device.
-            client_socket.connect(('127.0.0.1', ports.forwarded_port))
+            client_socket.connect(("127.0.0.1", ports.forwarded_port))
         except socket.timeout:
             raise rpc_client.Sl4aConnectionError(
-                'SL4A has not connected over the specified port within the '
-                'timeout of %s seconds.' % SOCKET_TIMEOUT)
+                "SL4A has not connected over the specified port within the "
+                "timeout of %s seconds." % SOCKET_TIMEOUT
+            )
         except socket.error as e:
             # In extreme, unlikely cases, a socket error with
             # errno.EADDRNOTAVAIL can be raised when a desired host_port is
@@ -228,7 +238,7 @@
                 return self._create_client_side_connection(ports)
             raise
         ports.client_port = client_socket.getsockname()[1]
-        return client_socket, client_socket.makefile(mode='brw')
+        return client_socket, client_socket.makefile(mode="brw")
 
     def terminate(self):
         """Terminates the session.
@@ -238,12 +248,11 @@
         """
         with self._terminate_lock:
             if not self._terminated:
-                self.log.debug('Terminating Session.')
+                self.log.debug("Terminating Session.")
                 try:
                     self.rpc_client.closeSl4aSession()
                 except Exception as e:
-                    if "SL4A session has already been terminated" not in str(
-                            e):
+                    if "SL4A session has already been terminated" not in str(e):
                         self.log.warning(e)
                 # Must be set after closeSl4aSession so the rpc_client does not
                 # think the session has closed.
diff --git a/src/antlion/controllers/sl4a_lib/sl4a_types.py b/src/antlion/controllers/sl4a_lib/sl4a_types.py
index 53ea0f0..434ff92 100644
--- a/src/antlion/controllers/sl4a_lib/sl4a_types.py
+++ b/src/antlion/controllers/sl4a_lib/sl4a_types.py
@@ -31,16 +31,18 @@
 class Sl4aNetworkInfo(DictObject):
     """SL4A equivalent of an Android NetworkInfo Object"""
 
-    def __init__(self,
-                 isAvailable=None,
-                 isConnected=None,
-                 isFailover=None,
-                 isRoaming=None,
-                 ExtraInfo=None,
-                 FailedReason=None,
-                 TypeName=None,
-                 SubtypeName=None,
-                 State=None):
+    def __init__(
+        self,
+        isAvailable=None,
+        isConnected=None,
+        isFailover=None,
+        isRoaming=None,
+        ExtraInfo=None,
+        FailedReason=None,
+        TypeName=None,
+        SubtypeName=None,
+        State=None,
+    ):
         DictObject.__init__(
             self,
             isAvailable=isAvailable,
@@ -51,4 +53,5 @@
             FailedReason=FailedReason,
             TypeName=TypeName,
             SubtypeName=SubtypeName,
-            State=State)
+            State=State,
+        )
diff --git a/src/antlion/controllers/sniffer.py b/src/antlion/controllers/sniffer.py
index 88b43d9..e87a547 100644
--- a/src/antlion/controllers/sniffer.py
+++ b/src/antlion/controllers/sniffer.py
@@ -41,18 +41,17 @@
         interface = c["Interface"]
         base_configs = c["BaseConfigs"]
         module_name = "antlion.controllers.sniffer_lib.{}.{}".format(
-            sniffer_type, sniffer_subtype)
+            sniffer_type, sniffer_subtype
+        )
         module = importlib.import_module(module_name)
         objs.append(
-            module.Sniffer(interface,
-                           logging.getLogger(),
-                           base_configs=base_configs))
+            module.Sniffer(interface, logging.getLogger(), base_configs=base_configs)
+        )
     return objs
 
 
 def destroy(objs):
-    """Destroys the sniffers and terminates any ongoing capture sessions.
-    """
+    """Destroys the sniffers and terminates any ongoing capture sessions."""
     for sniffer in objs:
         try:
             sniffer.stop_capture()
@@ -167,11 +166,13 @@
         """
         raise NotImplementedError("Base class should not be called directly!")
 
-    def start_capture(self,
-                      override_configs=None,
-                      additional_args=None,
-                      duration=None,
-                      packet_count=None):
+    def start_capture(
+        self,
+        override_configs=None,
+        additional_args=None,
+        duration=None,
+        packet_count=None,
+    ):
         """This function starts a capture which is saved to the specified file
         path.
 
diff --git a/src/antlion/controllers/sniffer_lib/local/local_base.py b/src/antlion/controllers/sniffer_lib/local/local_base.py
index 5a6af09..8873350 100644
--- a/src/antlion/controllers/sniffer_lib/local/local_base.py
+++ b/src/antlion/controllers/sniffer_lib/local/local_base.py
@@ -13,7 +13,6 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
 """
 Class for Local sniffers - i.e. running on the local machine.
 
@@ -30,6 +29,7 @@
 from antlion import utils
 from antlion.controllers import sniffer
 
+
 class SnifferLocalBase(sniffer.Sniffer):
     """This class defines the common behaviors of WLAN sniffers running on
     WLAN interfaces of the local machine.
@@ -39,8 +39,7 @@
     """
 
     def __init__(self, interface, logger, base_configs=None):
-        """See base class documentation
-        """
+        """See base class documentation"""
         self._base_configs = None
         self._capture_file_path = ""
         self._interface = ""
@@ -61,13 +60,11 @@
             raise sniffer.ExecutionError(err)
 
     def get_interface(self):
-        """See base class documentation
-        """
+        """See base class documentation"""
         return self._interface
 
     def get_type(self):
-        """See base class documentation
-        """
+        """See base class documentation"""
         return "local"
 
     def get_capture_file(self):
@@ -87,13 +84,16 @@
 
         if sniffer.Sniffer.CONFIG_KEY_CHANNEL in final_configs:
             try:
-                utils.exe_cmd("iwconfig", self._interface, "channel",
-                        str(final_configs[sniffer.Sniffer.CONFIG_KEY_CHANNEL]))
+                utils.exe_cmd(
+                    "iwconfig",
+                    self._interface,
+                    "channel",
+                    str(final_configs[sniffer.Sniffer.CONFIG_KEY_CHANNEL]),
+                )
             except Exception as err:
                 raise sniffer.ExecutionError(err)
 
-    def _get_command_line(self, additional_args=None, duration=None,
-                          packet_count=None):
+    def _get_command_line(self, additional_args=None, duration=None, packet_count=None):
         """Utility function to be implemented by every child class - which
         are the concrete sniffer classes. Each sniffer-specific class should
         derive the command line to execute its sniffer based on the specified
@@ -108,44 +108,51 @@
         self._process = None
         shutil.move(self._temp_capture_file_path, self._capture_file_path)
 
-    def start_capture(self, override_configs=None,
-                      additional_args=None, duration=None,
-                      packet_count=None):
-        """See base class documentation
-        """
+    def start_capture(
+        self,
+        override_configs=None,
+        additional_args=None,
+        duration=None,
+        packet_count=None,
+    ):
+        """See base class documentation"""
         if self._process is not None:
             raise sniffer.InvalidOperationError(
-                    "Trying to start a sniff while another is still running!")
-        capture_dir = os.path.join(self._logger.log_path,
-                                   "Sniffer-{}".format(self._interface))
+                "Trying to start a sniff while another is still running!"
+            )
+        capture_dir = os.path.join(
+            self._logger.log_path, "Sniffer-{}".format(self._interface)
+        )
         os.makedirs(capture_dir, exist_ok=True)
-        self._capture_file_path = os.path.join(capture_dir,
-                      "capture_{}.pcap".format(logger.get_log_file_timestamp()))
+        self._capture_file_path = os.path.join(
+            capture_dir, "capture_{}.pcap".format(logger.get_log_file_timestamp())
+        )
 
         self._pre_capture_config(override_configs)
         _, self._temp_capture_file_path = tempfile.mkstemp(suffix=".pcap")
 
-        cmd = self._get_command_line(additional_args=additional_args,
-                                duration=duration, packet_count=packet_count)
+        cmd = self._get_command_line(
+            additional_args=additional_args,
+            duration=duration,
+            packet_count=packet_count,
+        )
 
         self._process = utils.start_standing_subprocess(cmd)
         return sniffer.ActiveCaptureContext(self, duration)
 
     def stop_capture(self):
-        """See base class documentation
-        """
+        """See base class documentation"""
         if self._process is None:
-            raise sniffer.InvalidOperationError(
-                                      "Trying to stop a non-started process")
+            raise sniffer.InvalidOperationError("Trying to stop a non-started process")
         utils.stop_standing_subprocess(self._process, kill_signal=signal.SIGINT)
         self._post_process()
 
     def wait_for_capture(self, timeout=None):
-        """See base class documentation
-        """
+        """See base class documentation"""
         if self._process is None:
             raise sniffer.InvalidOperationError(
-                                  "Trying to wait on a non-started process")
+                "Trying to wait on a non-started process"
+            )
         try:
             utils.wait_for_standing_subprocess(self._process, timeout)
             self._post_process()
diff --git a/src/antlion/controllers/sniffer_lib/local/tcpdump.py b/src/antlion/controllers/sniffer_lib/local/tcpdump.py
index 39f8720..85622dc 100644
--- a/src/antlion/controllers/sniffer_lib/local/tcpdump.py
+++ b/src/antlion/controllers/sniffer_lib/local/tcpdump.py
@@ -20,38 +20,32 @@
 
 
 class Sniffer(local_base.SnifferLocalBase):
-    """This class defines a sniffer which uses tcpdump as its back-end
-    """
+    """This class defines a sniffer which uses tcpdump as its back-end"""
 
     def __init__(self, config_path, logger, base_configs=None):
-        """See base class documentation
-        """
+        """See base class documentation"""
         self._executable_path = None
 
         super(local_base.SnifferLocalBase).__init__(
-            config_path, logger, base_configs=base_configs)
+            config_path, logger, base_configs=base_configs
+        )
 
         self._executable_path = shutil.which("tcpdump")
         if self._executable_path is None:
-            raise sniffer.SnifferError(
-                "Cannot find a path to the 'tcpdump' executable")
+            raise sniffer.SnifferError("Cannot find a path to the 'tcpdump' executable")
 
     def get_descriptor(self):
-        """See base class documentation
-        """
+        """See base class documentation"""
         return "local-tcpdump-{}".format(self._interface)
 
     def get_subtype(self):
-        """See base class documentation
-        """
+        """See base class documentation"""
         return "tcpdump"
 
-    def _get_command_line(self,
-                          additional_args=None,
-                          duration=None,
-                          packet_count=None):
-        cmd = "{} -i {} -w {}".format(self._executable_path, self._interface,
-                                      self._temp_capture_file_path)
+    def _get_command_line(self, additional_args=None, duration=None, packet_count=None):
+        cmd = "{} -i {} -w {}".format(
+            self._executable_path, self._interface, self._temp_capture_file_path
+        )
         if packet_count is not None:
             cmd = "{} -c {}".format(cmd, packet_count)
         if additional_args is not None:
diff --git a/src/antlion/controllers/sniffer_lib/local/tshark.py b/src/antlion/controllers/sniffer_lib/local/tshark.py
index b95aa7d..dd79eed 100644
--- a/src/antlion/controllers/sniffer_lib/local/tshark.py
+++ b/src/antlion/controllers/sniffer_lib/local/tshark.py
@@ -18,37 +18,37 @@
 from antlion.controllers import sniffer
 from antlion.controllers.sniffer_lib.local import local_base
 
+
 class Sniffer(local_base.SnifferLocalBase):
-    """This class defines a sniffer which uses tshark as its back-end
-    """
+    """This class defines a sniffer which uses tshark as its back-end"""
 
     def __init__(self, config_path, logger, base_configs=None):
-        """See base class documentation
-        """
+        """See base class documentation"""
         self._executable_path = None
 
         super().__init__(config_path, logger, base_configs=base_configs)
 
-        self._executable_path = (shutil.which("tshark")
-                                 or shutil.which("/usr/local/bin/tshark"))
+        self._executable_path = shutil.which("tshark") or shutil.which(
+            "/usr/local/bin/tshark"
+        )
         if self._executable_path is None:
-            raise sniffer.SnifferError("Cannot find a path to the 'tshark' "
-                                 "executable (or to '/usr/local/bin/tshark')")
+            raise sniffer.SnifferError(
+                "Cannot find a path to the 'tshark' "
+                "executable (or to '/usr/local/bin/tshark')"
+            )
 
     def get_descriptor(self):
-        """See base class documentation
-        """
+        """See base class documentation"""
         return "local-tshark-{}-ch{}".format(self._interface)
 
     def get_subtype(self):
-        """See base class documentation
-        """
+        """See base class documentation"""
         return "tshark"
 
-    def _get_command_line(self, additional_args=None, duration=None,
-                          packet_count=None):
-        cmd = "{} -i {} -w {}".format(self._executable_path, self._interface,
-                                      self._temp_capture_file_path)
+    def _get_command_line(self, additional_args=None, duration=None, packet_count=None):
+        cmd = "{} -i {} -w {}".format(
+            self._executable_path, self._interface, self._temp_capture_file_path
+        )
         if duration is not None:
             cmd = "{} -a duration:{}".format(cmd, duration)
         if packet_count is not None:
diff --git a/src/antlion/controllers/spectracom_lib/__init__.py b/src/antlion/controllers/spectracom_lib/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/controllers/spectracom_lib/__init__.py
+++ /dev/null
diff --git a/src/antlion/controllers/spectracom_lib/gsg6.py b/src/antlion/controllers/spectracom_lib/gsg6.py
deleted file mode 100644
index e3d0896..0000000
--- a/src/antlion/controllers/spectracom_lib/gsg6.py
+++ /dev/null
@@ -1,219 +0,0 @@
-"""Python module for Spectracom/Orolia GSG-6 GNSS simulator."""
-
-from antlion.controllers import abstract_inst
-
-
-class GSG6Error(abstract_inst.SocketInstrumentError):
-    """GSG-6 Instrument Error Class."""
-
-
-class GSG6(abstract_inst.SocketInstrument):
-    """GSG-6 Class, inherted from abstract_inst SocketInstrument."""
-
-    def __init__(self, ip_addr, ip_port):
-        """Init method for GSG-6.
-
-        Args:
-            ip_addr: IP Address.
-                Type, str.
-            ip_port: TCPIP Port.
-                Type, str.
-        """
-        super(GSG6, self).__init__(ip_addr, ip_port)
-
-        self.idn = ''
-
-    def connect(self):
-        """Init and Connect to GSG-6."""
-        self._connect_socket()
-
-        self.get_idn()
-
-        infmsg = 'Connected to GSG-6, with ID: {}'.format(self.idn)
-        self._logger.debug(infmsg)
-
-    def close(self):
-        """Close GSG-6."""
-        self._close_socket()
-
-        self._logger.debug('Closed connection to GSG-6')
-
-    def get_idn(self):
-        """Get the Idenification of GSG-6.
-
-        Returns:
-            GSG-6 Identifier
-        """
-        self.idn = self._query('*IDN?')
-
-        return self.idn
-
-    def start_scenario(self, scenario=''):
-        """Start to run scenario.
-
-        Args:
-            scenario: Scenario to run.
-                Type, str.
-                Default, '', which will run current selected one.
-        """
-        if scenario:
-            cmd = 'SOUR:SCEN:LOAD ' + scenario
-            self._send(cmd)
-
-        self._send('SOUR:SCEN:CONT START')
-
-        if scenario:
-            infmsg = 'Started running scenario {}'.format(scenario)
-        else:
-            infmsg = 'Started running current scenario'
-
-        self._logger.debug(infmsg)
-
-    def stop_scenario(self):
-        """Stop the running scenario."""
-
-        self._send('SOUR:SCEN:CONT STOP')
-
-        self._logger.debug('Stopped running scenario')
-
-    def preset(self):
-        """Preset GSG-6 to default status."""
-        self._send('*RST')
-
-        self._logger.debug('Reset GSG-6')
-
-    def set_power(self, power_level):
-        """set GSG-6 transmit power on all bands.
-
-        Args:
-            power_level: transmit power level
-                Type, float.
-                Decimal, unit [dBm]
-
-        Raises:
-            GSG6Error: raise when power level is not in [-160, -65] range.
-        """
-        if not -160 <= power_level <= -65:
-            errmsg = ('"power_level" must be within [-160, -65], '
-                      'current input is {}').format(str(power_level))
-            raise GSG6Error(error=errmsg, command='set_power')
-
-        self._send(':SOUR:POW ' + str(round(power_level, 1)))
-
-        infmsg = 'Set GSG-6 transmit power to "{}"'.format(round(
-            power_level, 1))
-        self._logger.debug(infmsg)
-
-    def get_nmealog(self):
-        """Get GSG6 NMEA data.
-
-        Returns:
-            GSG6's NMEA data
-        """
-        nmea_data = self._query('SOUR:SCEN:LOG?')
-
-        return nmea_data
-
-    def toggle_scenario_power(self,
-                              toggle_onoff='ON',
-                              sat_id='',
-                              sat_system=''):
-        """Toggle ON OFF scenario.
-
-        Args:
-            toggle_onoff: turn on or off the satellites
-                Type, str. Option ON/OFF
-                Default, 'ON'
-            sat_id: satellite identifiers
-                Type, str.
-                Option 'Gxx/Rxx/Exx/Cxx/Jxx/Ixx/Sxxx'
-                where xx is satellite identifiers no.
-                e.g.: G10
-            sat_system: to toggle On/OFF for all Satellites
-                Type, str
-                Option [GPS, GLO, GAL, BDS, QZSS, IRNSS, SBAS]
-        Raises:
-            GSG6Error: raise when toggle is not set.
-        """
-        if not sat_id and not sat_system:
-            self._send(':SOUR:SCEN:POW ' + str(toggle_onoff))
-            infmsg = 'Set GSG-6 Power to "{}"'.format(toggle_onoff)
-            self._logger.debug(infmsg)
-
-        elif sat_id and not sat_system:
-            self._send(':SOUR:SCEN:POW ' + str(sat_id) + ',' +
-                       str(toggle_onoff))
-            infmsg = ('Set GSG-6 Power to "{}" for "{}" satellite '
-                      'identifiers').format(toggle_onoff, sat_id)
-            self._logger.debug(infmsg)
-
-        elif not sat_id and sat_system:
-            self._send(':SOUR:SCEN:POW ' + str(sat_system) + ',' +
-                       str(toggle_onoff))
-            infmsg = 'Set GSG-6 Power to "{}" for "{}" satellite system'.format(
-                toggle_onoff, sat_system)
-            self._logger.debug(infmsg)
-
-        else:
-            errmsg = ('"toggle power" must have either of these value [ON/OFF],'
-                      ' current input is {}').format(str(toggle_onoff))
-            raise GSG6Error(error=errmsg, command='toggle_scenario_power')
-
-    def set_scenario_power(self,
-                           power_level,
-                           sat_id='',
-                           sat_system='',
-                           freq_band=''):
-        """Set dynamic power for the running scenario.
-
-        Args:
-            power_level: transmit power level
-                Type, float.
-                Decimal, unit [dBm]
-            sat_id: set power level for specific satellite identifiers
-                Type, str. Option
-                'Gxx/Rxx/Exx/Cxx/Jxx/Ixx/Sxxx'
-                where xx is satellite identifiers number
-                e.g.: G10
-            sat_system: to set power level for all Satellites
-                Type, str
-                Option [GPS, GLO, GAL, BDS, QZSS, IRNSS, SBAS]
-            freq_band: Frequency band to set the power level
-                Type, str
-                Option  [L1, L2, L5, ALL]
-                Default, '', assumed to be L1.
-        Raises:
-            GSG6Error: raise when power level is not in [-160, -65] range.
-        """
-        if freq_band == 'ALL':
-            if not -100 <= power_level <= 100:
-                errmsg = ('"power_level" must be within [-100, 100], for '
-                          '"freq_band"="ALL", current input is {}').format(
-                              str(power_level))
-                raise GSG6Error(error=errmsg, command='set_scenario_power')
-        else:
-            if not -160 <= power_level <= -65:
-                errmsg = ('"power_level" must be within [-160, -65], for '
-                          '"freq_band" != "ALL", current input is {}').format(
-                              str(power_level))
-                raise GSG6Error(error=errmsg, command='set_scenario_power')
-
-        if sat_id and not sat_system:
-            self._send(':SOUR:SCEN:POW ' + str(sat_id) + ',' +
-                       str(round(power_level, 1)) + ',' + str(freq_band))
-            infmsg = ('Set GSG-6 transmit power to "{}" for "{}" '
-                      'satellite id').format(round(power_level, 1), sat_id)
-            self._logger.debug(infmsg)
-
-        elif not sat_id and sat_system:
-            self._send(':SOUR:SCEN:POW ' + str(sat_system) + ',' +
-                       str(round(power_level, 1)) + ',' + str(freq_band))
-            infmsg = ('Set GSG-6 transmit power to "{}" for "{}" '
-                      'satellite system').format(round(power_level, 1),
-                                                 sat_system)
-            self._logger.debug(infmsg)
-
-        else:
-            errmsg = ('sat_id or sat_system must have value, current input of '
-                      'sat_id {} and sat_system {}').format(sat_id, sat_system)
-            raise GSG6Error(error=errmsg, command='set_scenario_power')
diff --git a/src/antlion/controllers/spirent_lib/__init__.py b/src/antlion/controllers/spirent_lib/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/controllers/spirent_lib/__init__.py
+++ /dev/null
diff --git a/src/antlion/controllers/spirent_lib/gss6450.py b/src/antlion/controllers/spirent_lib/gss6450.py
deleted file mode 100644
index 3fd1191..0000000
--- a/src/antlion/controllers/spirent_lib/gss6450.py
+++ /dev/null
@@ -1,381 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Python module for Spirent GSS6450 GNSS RPS."""
-
-import datetime
-import numbers
-from antlion.controllers import abstract_inst
-
-
-class GSS6450Error(abstract_inst.SocketInstrumentError):
-    """GSS6450 Instrument Error Class."""
-
-
-class GSS6450(abstract_inst.RequestInstrument):
-    """GSS6450 Class, inherted from abstract_inst RequestInstrument."""
-
-    def __init__(self, ip_addr):
-        """Init method for GSS6450.
-
-        Args:
-            ip_addr: IP Address.
-                Type, str.
-        """
-        super(GSS6450, self).__init__(ip_addr)
-
-        self.idn = 'Spirent-GSS6450'
-
-    def _put(self, cmd):
-        """Send put command via GSS6450 HTTP Request and get response.
-
-        Args:
-            cmd: parameters listed in SHM_PUT.
-                Type, Str.
-
-        Returns:
-            resp: Response from the _query method.
-                Type, Str.
-        """
-        put_cmd = 'shm_put.shtml?' + cmd
-        resp = self._query(put_cmd)
-
-        return resp
-
-    def _get(self, cmd):
-        """Send get command via GSS6450 HTTP Request and get response.
-
-        Args:
-            cmd: parameters listed in SHM_GET.
-                Type, Str.
-
-        Returns:
-          resp: Response from the _query method.
-              Type, Str.
-        """
-        get_cmd = 'shm_get.shtml?' + cmd
-        resp = self._query(get_cmd)
-
-        return resp
-
-    def get_scenario_filename(self):
-        """Get the scenario filename of GSS6450.
-
-        Returns:
-            filename: RPS Scenario file name.
-                Type, Str.
-        """
-        resp_raw = self._get('-f')
-        filename = resp_raw.split(':')[-1].strip(' ')
-        self._logger.debug('Got scenario file name: "%s".', filename)
-
-        return filename
-
-    def get_scenario_description(self):
-        """Get the scenario description of GSS6450.
-
-        Returns:
-            description: RPS Scenario description.
-                Type, Str.
-        """
-        resp_raw = self._get('-d')
-        description = resp_raw.split('-d')[-1].strip(' ')
-
-        if description:
-            self._logger.debug('Got scenario description: "%s".', description)
-        else:
-            self._logger.warning('Got scenario description with empty string.')
-
-        return description
-
-    def get_scenario_location(self):
-        """Get the scenario location of GSS6450.
-
-        Returns:
-            location: RPS Scenario location.
-                Type, Str.
-        """
-        resp_raw = self._get('-i')
-        location = resp_raw.split('-i')[-1].strip(' ')
-
-        if location:
-            self._logger.debug('Got scenario location: "%s".', location)
-        else:
-            self._logger.warning('Got scenario location with empty string.')
-
-        return location
-
-    def get_operation_mode(self):
-        """Get the operation mode of GSS6450.
-
-        Returns:
-            mode: RPS Operation Mode.
-                Type, Str.
-                Option, STOPPED/PLAYING/RECORDING
-        """
-        resp_raw = self._get('-m')
-        mode = resp_raw.split('-m')[-1].strip(' ')
-        self._logger.debug('Got operation mode: "%s".', mode)
-
-        return mode
-
-    def get_battery_level(self):
-        """Get the battery level of GSS6450.
-
-        Returns:
-            batterylevel: RPS Battery Level.
-                Type, float.
-        """
-        resp_raw = self._get('-l')
-        batterylevel = float(resp_raw.split('-l')[-1].strip(' '))
-        self._logger.debug('Got battery level: %s%%.', batterylevel)
-
-        return batterylevel
-
-    def get_rfport_voltage(self):
-        """Get the RF port voltage of GSS6450.
-
-        Returns:
-            voltageout: RPS RF port voltage.
-                Type, str
-        """
-        resp_raw = self._get('-v')
-        voltageout = resp_raw.split('-v')[-1].strip(' ')
-        self._logger.debug('Got RF port voltage: "%s".', voltageout)
-
-        return voltageout
-
-    def get_storage_media(self):
-        """Get the storage media of GSS6450.
-
-        Returns:
-            media: RPS storage.
-                Type, str
-
-        Raises:
-            GSS6450Error: raise when request response is not support.
-        """
-        resp_raw = self._get('-M')
-        resp_num = resp_raw.split('-M')[-1].strip(' ')
-
-        if resp_num == '1':
-            media = '1-INTERNAL'
-        elif resp_num == '2':
-            media = '2-REMOVABLE'
-        else:
-            errmsg = ('"{}" is not recognized as GSS6450 valid storage media'
-                      ' type'.format(resp_num))
-            raise GSS6450Error(error=errmsg, command='get_storage_media')
-
-        self._logger.debug('Got current storage media: %s.', media)
-
-        return media
-
-    def get_attenuation(self):
-        """Get the attenuation of GSS6450.
-
-        Returns:
-            attenuation: RPS attenuation level, in dB.
-                Type, list of float.
-        """
-        resp_raw = self._get('-a')
-        resp_str = resp_raw.split('-a')[-1].strip(' ')
-        self._logger.debug('Got attenuation: %s dB.', resp_str)
-        attenuation = [float(itm) for itm in resp_str.split(',')]
-
-        return attenuation
-
-    def get_elapsed_time(self):
-        """Get the running scenario elapsed time of GSS6450.
-
-        Returns:
-            etime: RPS elapsed time.
-                Type, datetime.timedelta.
-        """
-        resp_raw = self._get('-e')
-        resp_str = resp_raw.split('-e')[-1].strip(' ')
-        self._logger.debug('Got senario elapsed time: "%s".', resp_str)
-        etime_tmp = datetime.datetime.strptime(resp_str, '%H:%M:%S')
-        etime = datetime.timedelta(hours=etime_tmp.hour,
-                                   minutes=etime_tmp.minute,
-                                   seconds=etime_tmp.second)
-
-        return etime
-
-    def get_playback_offset(self):
-        """Get the running scenario playback offset of GSS6450.
-
-        Returns:
-            offset: RPS playback offset.
-                Type, datetime.timedelta.
-        """
-        resp_raw = self._get('-o')
-        offset_tmp = float(resp_raw.split('-o')[-1].strip(' '))
-        self._logger.debug('Got senario playback offset: %s sec.', offset_tmp)
-        offset = datetime.timedelta(seconds=offset_tmp)
-
-        return offset
-
-    def play_scenario(self, scenario=''):
-        """Start to play scenario in GSS6450.
-
-        Args:
-            scenario: Scenario to play.
-                Type, str.
-                Default, '', which will run current selected one.
-        """
-        if scenario:
-            cmd = '-f{},-wP'.format(scenario)
-        else:
-            cmd = '-wP'
-
-        _ = self._put(cmd)
-
-        if scenario:
-            infmsg = 'Started playing scenario: "{}".'.format(scenario)
-        else:
-            infmsg = 'Started playing current scenario.'
-
-        self._logger.debug(infmsg)
-
-    def record_scenario(self, scenario=''):
-        """Start to record scenario in GSS6450.
-
-        Args:
-            scenario: Scenario to record.
-                Type, str.
-                Default, '', which will run current selected one.
-        """
-        if scenario:
-            cmd = '-f{},-wR'.format(scenario)
-        else:
-            cmd = '-wR'
-
-        _ = self._put(cmd)
-
-        if scenario:
-            infmsg = 'Started recording scenario: "{}".'.format(scenario)
-        else:
-            infmsg = 'Started recording scenario.'
-
-        self._logger.debug(infmsg)
-
-    def stop_scenario(self):
-        """Start to stop playing/recording scenario in GSS6450."""
-        _ = self._put('-wS')
-
-        self._logger.debug('Stopped playing/recording scanrio.')
-
-    def set_rfport_voltage(self, voltageout):
-        """Set the RF port voltage of GSS6450.
-
-        Args:
-            voltageout: RPS RF port voltage.
-                Type, str
-
-        Raises:
-            GSS6450Error: raise when voltageout input is not valid.
-        """
-        if voltageout == 'OFF':
-            voltage_cmd = '0'
-        elif voltageout == '3.3V':
-            voltage_cmd = '3'
-        elif voltageout == '5V':
-            voltage_cmd = '5'
-        else:
-            errmsg = ('"{}" is not recognized as GSS6450 valid RF port voltage'
-                      ' type'.format(voltageout))
-            raise GSS6450Error(error=errmsg, command='set_rfport_voltage')
-
-        _ = self._put('-v{},-wV'.format(voltage_cmd))
-        self._logger.debug('Set RF port voltage: "%s".', voltageout)
-
-    def set_attenuation(self, attenuation):
-        """Set the attenuation of GSS6450.
-
-        Args:
-            attenuation: RPS attenuation level, in dB.
-                Type, numerical.
-
-        Raises:
-            GSS6450Error: raise when attenuation is not in range.
-        """
-        if not 0 <= attenuation <= 31:
-            errmsg = ('"attenuation" must be within [0, 31], '
-                      'current input is {}').format(str(attenuation))
-            raise GSS6450Error(error=errmsg, command='set_attenuation')
-
-        attenuation_raw = round(attenuation)
-
-        if attenuation_raw != attenuation:
-            warningmsg = ('"attenuation" must be integer, current input '
-                          'will be rounded to {}'.format(attenuation_raw))
-            self._logger.warning(warningmsg)
-
-        _ = self._put('-a{},-wA'.format(attenuation_raw))
-
-        self._logger.debug('Set attenuation: %s dB.', attenuation_raw)
-
-    def set_playback_offset(self, offset):
-        """Set the playback offset of GSS6450.
-
-        Args:
-            offset: RPS playback offset.
-                Type, datetime.timedelta, or numerical.
-
-        Raises:
-            GSS6450Error: raise when offset is not numeric or timedelta.
-        """
-        if isinstance(offset, datetime.timedelta):
-            offset_raw = offset.total_seconds()
-        elif isinstance(offset, numbers.Number):
-            offset_raw = offset
-        else:
-            raise GSS6450Error(error=('"offset" must be numerical value or '
-                                      'datetime.timedelta'),
-                               command='set_playback_offset')
-
-        _ = self._put('-o{}'.format(offset_raw))
-
-        self._logger.debug('Set playback offset: %s sec.', offset_raw)
-
-    def set_storage_media(self, media):
-        """Set the storage media of GSS6450.
-
-        Args:
-            media: RPS storage Media, Internal or External.
-                Type, str. Option, 'internal', 'removable'
-
-        Raises:
-            GSS6450Error: raise when media option is not support.
-        """
-        if media == 'internal':
-            raw_media = '1'
-        elif media == 'removable':
-            raw_media = '2'
-        else:
-            raise GSS6450Error(
-                error=('"media" input must be in ["internal", "removable"]. '
-                       ' Current input is {}'.format(media)),
-                command='set_storage_media')
-
-        _ = self._put('-M{}-wM'.format(raw_media))
-
-        resp_raw = self.get_storage_media()
-        if raw_media != resp_raw[0]:
-            raise GSS6450Error(
-                error=('Setting media "{}" is not the same as queried media '
-                       '"{}".'.format(media, resp_raw)),
-                command='set_storage_media')
diff --git a/src/antlion/controllers/spirent_lib/gss7000.py b/src/antlion/controllers/spirent_lib/gss7000.py
deleted file mode 100644
index 7cc47d4..0000000
--- a/src/antlion/controllers/spirent_lib/gss7000.py
+++ /dev/null
@@ -1,490 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Python module for Spirent GSS7000 GNSS simulator.
-@author: Clay Liao (jianhsiungliao@)
-"""
-from time import sleep
-import xml.etree.ElementTree as ET
-from antlion.controllers import abstract_inst
-
-
-def get_xml_text(xml_string='', tag=''):
-    """Parse xml from string and return specific tag
-
-        Args:
-            xml_string: xml string,
-                Type, Str.
-            tag: tag in xml,
-                Type, Str.
-
-        Returns:
-            text: Text content in the tag
-                Type, Str.
-        """
-    if xml_string and tag:
-        root = ET.fromstring(xml_string)
-        try:
-            text = str(root.find(tag).text).rstrip().lstrip()
-        except ValueError:
-            text = 'INVALID DATA'
-    else:
-        text = 'INVALID DATA'
-    return text
-
-
-class GSS7000Error(abstract_inst.SocketInstrumentError):
-    """GSS7000 Instrument Error Class."""
-
-
-class AbstractInstGss7000(abstract_inst.SocketInstrument):
-    """Abstract instrument for  GSS7000"""
-
-    def _query(self, cmd):
-        """query instrument via Socket.
-
-        Args:
-            cmd: Command to send,
-                Type, Str.
-
-        Returns:
-            resp: Response from Instrument via Socket,
-                Type, Str.
-        """
-        self._send(cmd)
-        self._wait()
-        resp = self._recv()
-        return resp
-
-    def _wait(self, wait_time=1):
-        """wait function
-        Args:
-            wait_time: wait time in sec.
-                Type, int,
-                Default, 1.
-        """
-        sleep(wait_time)
-
-
-class GSS7000Ctrl(AbstractInstGss7000):
-    """GSS7000 control daemon class"""
-
-    def __init__(self, ip_addr, ip_port=7717):
-        """Init method for GSS7000 Control Daemon.
-
-        Args:
-            ip_addr: IP Address.
-                Type, str.
-            ip_port: TCPIP Port.
-                Type, str.
-        """
-        super().__init__(ip_addr, ip_port)
-        self.idn = 'Spirent-GSS7000 Control Daemon'
-
-    def connect(self):
-        """Init and Connect to GSS7000 Control Daemon."""
-        # Connect socket then connect socket again
-        self._close_socket()
-        self._connect_socket()
-        # Stop GSS7000 Control Daeamon Then Start
-        self._query('STOP_ENGINE')
-        self._wait()
-        self._query('START_ENGINE')
-
-    def close(self):
-        """Close GSS7000 control daemon"""
-        self._close_socket()
-        self._logger.debug('Closed connection to GSS7000 control daemon')
-
-
-class GSS7000(AbstractInstGss7000):
-    """GSS7000 Class, inherted from abstract_inst SocketInstrument."""
-
-    def __init__(self, ip_addr, engine_ip_port=15650, ctrl_ip_port=7717):
-        """Init method for GSS7000.
-
-        Args:
-            ip_addr: IP Address.
-                Type, str.
-            engine_ip_port: TCPIP Port for
-                Type, str.
-            ctrl_ip_port: TCPIP Port for Control Daemon
-        """
-        super().__init__(ip_addr, engine_ip_port)
-        self.idn = ''
-        self.connected = False
-        self.capability = []
-        self.gss7000_ctrl_daemon = GSS7000Ctrl(ip_addr, ctrl_ip_port)
-        # Close control daemon and engine sockets at the beginning
-        self.gss7000_ctrl_daemon._close_socket()
-        self._close_socket()
-
-    def connect(self):
-        """Connect GSS7000 engine daemon"""
-        # Connect control daemon socket
-        self._logger.debug('Connect to GSS7000')
-        self.gss7000_ctrl_daemon.connect()
-        # Connect to remote engine socket
-        self._wait()
-        self._connect_socket()
-        self.connected = True
-        self.get_hw_capability()
-
-    def close(self):
-        """Close GSS7000 engine daemon"""
-        # Close GSS7000 control daemon
-        self.gss7000_ctrl_daemon.close()
-        # Close GSS7000 engine daemon
-        self._close_socket()
-        self._logger.debug('Closed connection to GSS7000 engine daemon')
-
-    def _parse_hw_cap(self, xml):
-        """Parse GSS7000 hardware capability xml to list.
-            Args:
-                xml: hardware capability xml,
-                    Type, str.
-
-            Returns:
-                capability: Hardware capability dictionary
-                    Type, list.
-        """
-        root = ET.fromstring(xml)
-        capability_ls = list()
-        sig_cap_list = root.find('data').find('Signal_capabilities').findall(
-            'Signal')
-        for signal in sig_cap_list:
-            value = str(signal.text).rstrip().lstrip()
-            capability_ls.extend(value.upper().split(' '))
-        return capability_ls
-
-    def get_hw_capability(self):
-        """Check GSS7000 hardware capability
-
-            Returns:
-                capability: Hardware capability dictionary,
-                    Type, list.
-        """
-        if self.connected:
-            capability_xml = self._query('GET_LICENCED_HARDWARE_CAPABILITY')
-            self.capability = self._parse_hw_cap(capability_xml)
-
-        return self.capability
-
-    def get_idn(self):
-        """Get the SimREPLAYplus Version
-
-        Returns:
-            SimREPLAYplus Version
-        """
-        idn_xml = self._query('*IDN?')
-        self.idn = get_xml_text(idn_xml, 'data')
-        return self.idn
-
-    def load_scenario(self, scenario=''):
-        """Load the scenario.
-
-        Args:
-            scenario: path of scenario,
-                Type, str
-        """
-        if scenario == '':
-            errmsg = ('Missing scenario file')
-            raise GSS7000Error(error=errmsg, command='load_scenario')
-        else:
-            self._logger.debug('Stopped the original scenario')
-            self._query('-,EN,1')
-            cmd = 'SC,' + scenario
-            self._logger.debug('Loading scenario')
-            self._query(cmd)
-            self._logger.debug('Scenario is loaded')
-            return True
-        return False
-
-    def start_scenario(self, scenario=''):
-        """Load and Start the running scenario.
-
-        Args:
-            scenario: path of scenario,
-                Type, str
-        """
-        if scenario:
-            if self.load_scenario(scenario):
-                self._query('RU')
-            else:
-                infmsg = 'No scenario is loaded. Stop running scenario'
-                self._logger.debug(infmsg)
-        else:
-            pass
-
-        if scenario:
-            infmsg = 'Started running scenario {}'.format(scenario)
-        else:
-            infmsg = 'Started running current scenario'
-
-        self._logger.debug(infmsg)
-
-    def get_scenario_name(self):
-        """Get current scenario name"""
-        sc_name_xml = self._query('SC_NAME')
-        return get_xml_text(sc_name_xml, 'data')
-
-    def stop_scenario(self):
-        """Stop the running scenario."""
-        self._query('-,EN,1')
-        self._logger.debug('Stopped running scenario')
-
-    def set_power_offset(self, ant=1, power_offset=0):
-        """Set Power Offset of GSS7000 Tx
-        Args:
-            ant: antenna number of GSS7000
-            power_offset: transmit power offset level
-                Type, float.
-                Decimal, unit [dB]
-
-        Raises:
-            GSS7000Error: raise when power offset level is not in [-49, 15] range.
-        """
-        if not -49 <= power_offset <= 15:
-            errmsg = (f'"power_offset" must be within [-49, 15], '
-                      f'current input is {power_offset}')
-            raise GSS7000Error(error=errmsg, command='set_power_offset')
-
-        cmd = f'-,POW_LEV,V1_A{ant},{power_offset},GPS,0,0,1,1,1,1,0'
-        self._query(cmd)
-
-        infmsg = f'Set veichel 1 antenna {ant} power offset: {power_offset}'
-        self._logger.debug(infmsg)
-
-    def set_ref_power(self, ref_dBm=-130):
-        """Set Ref Power of GSS7000 Tx
-        Args:
-            ref_dBm: transmit reference power level in dBm for GSS7000
-                Type, float.
-                Decimal, unit [dBm]
-
-        Raises:
-            GSS7000Error: raise when power offset level is not in [-170, -115] range.
-        """
-        if not -170 <= ref_dBm <= -115:
-            errmsg = ('"power_offset" must be within [-170, -115], '
-                      'current input is {}').format(str(ref_dBm))
-            raise GSS7000Error(error=errmsg, command='set_ref_power')
-        cmd = 'REF_DBM,{}'.format(str(round(ref_dBm, 1)))
-        self._query(cmd)
-        infmsg = 'Set reference power level: {}'.format(str(round(ref_dBm, 1)))
-        self._logger.debug(infmsg)
-
-    def get_status(self, return_txt=False):
-        """Get current GSS7000 Status
-        Args:
-            return_txt: booling for determining the return results
-                Type, booling.
-        """
-        status_xml = self._query('NULL')
-        status = get_xml_text(status_xml, 'status')
-        if return_txt:
-            status_dict = {
-                '0': 'No Scenario loaded',
-                '1': 'Not completed loading a scenario',
-                '2': 'Idle, ready to run a scenario',
-                '3': 'Arming the scenario',
-                '4': 'Completed arming; or waiting for a command or'
-                     'trigger signal to start the scenario',
-                '5': 'Scenario running',
-                '6': 'Current scenario is paused.',
-                '7': 'Active scenario has stopped and has not been reset.'
-                     'Waiting for further commands.'
-            }
-            return status_dict.get(status)
-        else:
-            return int(status)
-
-    def set_power(self, power_level=-130):
-        """Set Power Level of GSS7000 Tx
-        Args:
-            power_level: transmit power level
-                Type, float.
-                Decimal, unit [dBm]
-
-        Raises:
-            GSS7000Error: raise when power level is not in [-170, -115] range.
-        """
-        if not -170 <= power_level <= -115:
-            errmsg = (f'"power_level" must be within [-170, -115], '
-                      f'current input is {power_level}')
-            raise GSS7000Error(error=errmsg, command='set_power')
-
-        power_offset = power_level + 130
-        self.set_power_offset(1, power_offset)
-        self.set_power_offset(2, power_offset)
-
-        infmsg = 'Set GSS7000 transmit power to "{}"'.format(
-            round(power_level, 1))
-        self._logger.debug(infmsg)
-
-    def power_lev_offset_cal(self, power_level=-130, sat='GPS', band='L1'):
-        """Convert target power level to power offset for GSS7000 power setting
-        Args:
-            power_level: transmit power level
-                Type, float.
-                Decimal, unit [dBm]
-                Default. -130
-            sat_system: to set power level for all Satellites
-                Type, str
-                Option 'GPS/GLO/GAL'
-                Type, str
-            freq_band: Frequency band to set the power level
-                Type, str
-                Option 'L1/L5/B1I/B1C/B2A/E5'
-                Default, '', assumed to be L1.
-        Return:
-            power_offset: The calculated power offset for setting GSS7000 GNSS target power.
-        """
-        gss7000_tx_pwr = {
-            'GPS_L1': -130,
-            'GPS_L5': -127.9,
-            'GLONASS_F1': -131,
-            'GALILEO_L1': -127,
-            'GALILEO_E5': -122,
-            'BEIDOU_B1I': -133,
-            'BEIDOU_B1C': -130,
-            'BEIDOU_B2A': -127,
-            'QZSS_L1': -128.5,
-            'QZSS_L5': -124.9,
-            'IRNSS_L5': -130
-        }
-
-        sat_band = f'{sat}_{band}'
-        infmsg = f'Target satellite system and band: {sat_band}'
-        self._logger.debug(infmsg)
-        default_pwr_lev = gss7000_tx_pwr.get(sat_band, -130)
-        power_offset = power_level - default_pwr_lev
-        infmsg = (
-            f'Targer power: {power_level}; Default power: {default_pwr_lev};'
-            f' Power offset: {power_offset}')
-        self._logger.debug(infmsg)
-
-        return power_offset
-
-    def sat_band_convert(self, sat, band):
-        """Satellite system and operation band conversion and check.
-        Args:
-            sat: to set power level for all Satellites
-                Type, str
-                Option 'GPS/GLO/GAL/BDS'
-                Type, str
-            band: Frequency band to set the power level
-                Type, str
-                Option 'L1/L5/B1I/B1C/B2A/F1/E5'
-                Default, '', assumed to be L1.
-        """
-        sat_system_dict = {
-            'GPS': 'GPS',
-            'GLO': 'GLONASS',
-            'GAL': 'GALILEO',
-            'BDS': 'BEIDOU',
-            'IRNSS': 'IRNSS',
-            'ALL': 'GPS'
-        }
-        sat = sat_system_dict.get(sat, 'GPS')
-        if band == '':
-            infmsg = 'No band is set. Set to default band = L1'
-            self._logger.debug(infmsg)
-            band = 'L1'
-        if sat == '':
-            infmsg = 'No satellite system is set. Set to default sat = GPS'
-            self._logger.debug(infmsg)
-            sat = 'GPS'
-        sat_band = f'{sat}_{band}'
-        self._logger.debug(f'Current band: {sat_band}')
-        self._logger.debug(f'Capability: {self.capability}')
-        # Check if satellite standard and band are supported
-        # If not in support list, return GPS_L1 as default
-        if not sat_band in self.capability:
-            errmsg = (
-                f'Satellite system and band ({sat_band}) are not supported.'
-                f'The GSS7000 support list: {self.capability}')
-            raise GSS7000Error(error=errmsg, command='set_scenario_power')
-        else:
-            sat_band_tp = tuple(sat_band.split('_'))
-
-        return sat_band_tp
-
-    def set_scenario_power(self,
-                           power_level=-130,
-                           sat_id='',
-                           sat_system='',
-                           freq_band='L1'):
-        """Set dynamic power for the running scenario.
-        Args:
-            power_level: transmit power level
-                Type, float.
-                Decimal, unit [dBm]
-                Default. -130
-            sat_id: set power level for specific satellite identifiers
-                Type, int.
-            sat_system: to set power level for all Satellites
-                Type, str
-                Option 'GPS/GLO/GAL/BDS'
-                Type, str
-                Default, '', assumed to be GPS.
-            freq_band: Frequency band to set the power level
-                Type, str
-                Option 'L1/L5/B1I/B1C/B2A/F1/E5/ALL'
-                Default, '', assumed to be L1.
-        Raises:
-            GSS7000Error: raise when power offset is not in [-49, -15] range.
-        """
-        band_dict = {
-            'L1': 1,
-            'L5': 2,
-            'B2A': 2,
-            'B1I': 1,
-            'B1C': 1,
-            'F1': 1,
-            'E5': 2,
-            'ALL': 3
-        }
-
-        # Convert and check satellite system and band
-        sat, band = self.sat_band_convert(sat_system, freq_band)
-        # Get freq band setting
-        band_cmd = band_dict.get(band, 1)
-
-        if not sat_id:
-            sat_id = 0
-            all_tx_type = 1
-        else:
-            all_tx_type = 0
-
-        # Convert absolute power level to absolute power offset.
-        power_offset = self.power_lev_offset_cal(power_level, sat, band)
-
-        if not -49 <= power_offset <= 15:
-            errmsg = (f'"power_offset" must be within [-49, 15], '
-                      f'current input is {power_offset}')
-            raise GSS7000Error(error=errmsg, command='set_power_offset')
-
-        if band_cmd == 1:
-            cmd = f'-,POW_LEV,v1_a1,{power_offset},{sat},{sat_id},0,0,0,1,1,{all_tx_type}'
-            self._query(cmd)
-        elif band_cmd == 2:
-            cmd = f'-,POW_LEV,v1_a2,{power_offset},{sat},{sat_id},0,0,0,1,1,{all_tx_type}'
-            self._query(cmd)
-        elif band_cmd == 3:
-            cmd = f'-,POW_LEV,v1_a1,{power_offset},{sat},{sat_id},0,0,0,1,1,{all_tx_type}'
-            self._query(cmd)
-            cmd = f'-,POW_LEV,v1_a2,{power_offset},{sat},{sat_id},0,0,0,1,1,{all_tx_type}'
diff --git a/src/antlion/controllers/tigertail.py b/src/antlion/controllers/tigertail.py
deleted file mode 100644
index e99c6e5..0000000
--- a/src/antlion/controllers/tigertail.py
+++ /dev/null
@@ -1,123 +0,0 @@
-"""Module manager the required definitions for tigertail"""
-
-import logging
-import time
-
-from enum import Enum
-
-from antlion.libs.proc import job
-
-MOBLY_CONTROLLER_CONFIG_NAME = "Tigertail"
-ACTS_CONTROLLER_REFERENCE_NAME = "tigertails"
-
-TIGERTAIL_SLEEP_TIME = 5
-
-
-def create(configs):
-    """Takes a list of Tigertail serial numbers and returns Tigertail Controllers.
-
-    Args:
-        configs: A list of serial numbers
-
-    Returns:
-        a list of Tigertail controllers
-
-    Raises:
-        ValueError if the configuration is not a list of serial number
-    """
-    tigertails = []
-    if isinstance(configs, list):
-        for serial_no in configs:
-            tigertail = Tigertail(serial_no)
-            tigertails.append(tigertail)
-    else:
-        raise ValueError(
-            'Invalid config for tigertail, should be a list of serial number')
-
-    return tigertails
-
-
-def destroy(tigertails):
-    pass
-
-
-def get_info(tigertails):
-    return [tigertail.get_info() for tigertail in tigertails]
-
-
-class TigertailError(Exception):
-    pass
-
-
-class TigertailState(Enum):
-    def __str__(self):
-        return str(self.value)
-
-    A = 'A'
-    B = 'B'
-    Off = 'off'
-
-
-class Tigertail(object):
-    def __init__(self, serial_number):
-        self.serial_number = serial_number
-        self.tigertool_bin = None
-
-    def setup(self, user_params):
-        """Links tigertool binary
-
-        This function needs to be:
-        Args:
-            user_params: User defined parameters. Expected field is:
-            {
-                // required, string or list of strings
-                tigertool: ['/path/to/tigertool.par']
-            }
-        """
-        tigertool_path = user_params['tigertool']
-        if tigertool_path is None:
-            self.tigertool_bin = None
-        elif isinstance(tigertool_path, str):
-            self.tigertool_bin = tigertool_path
-        elif isinstance(tigertool_path, list):
-            if len(tigertool_path) == 0:
-                self.tigertool_bin = None
-            else:
-                self.tigertool_bin = tigertool_path[0]
-
-        if self.tigertool_bin is None:
-            raise TigertailError('Tigertail binary not found')
-
-        logging.getLogger().debug(
-            f'Setup {self.serial_number} with binary at {self.tigertool_bin}')
-
-    def turn_on_mux_A(self):
-        self._set_tigertail_state(TigertailState.A)
-
-    def turn_on_mux_B(self):
-        self._set_tigertail_state(TigertailState.B)
-
-    def turn_off(self):
-        self._set_tigertail_state(TigertailState.Off)
-
-    def get_info(self):
-        return {'tigertail_serial_no': self.serial_number}
-
-    def _set_tigertail_state(self, state: TigertailState):
-        """Sets state for tigertail, there are 3 possible states:
-            A  : enable port A
-            B  : enable port B
-            Off: turn off both ports
-        """
-        result = job.run([
-            self.tigertool_bin, '--serialno',
-            str(self.serial_number), '--mux',
-            str(state)
-        ],
-                         timeout=10)
-
-        if result.stderr != '':
-            raise TigertailError(result.stderr)
-
-        # Sleep time to let the device connected/disconnect to tigertail
-        time.sleep(TIGERTAIL_SLEEP_TIME)
diff --git a/src/antlion/controllers/utils_lib/commands/ip.py b/src/antlion/controllers/utils_lib/commands/ip.py
index 0b14d2e..7e028b1 100644
--- a/src/antlion/controllers/utils_lib/commands/ip.py
+++ b/src/antlion/controllers/utils_lib/commands/ip.py
@@ -44,7 +44,7 @@
                  where address is a ipaddress.IPv4Interface and broadcast
                  is an ipaddress.IPv4Address.
         """
-        results = self._runner.run('ip addr show dev %s' % net_interface)
+        results = self._runner.run("ip addr show dev %s" % net_interface)
         lines = results.stdout.splitlines()
 
         # Example stdout:
@@ -57,18 +57,17 @@
 
         for line in lines:
             line = line.strip()
-            match = re.search('inet (?P<address>[^\s]*) brd (?P<bcast>[^\s]*)',
-                              line)
+            match = re.search("inet (?P<address>[^\s]*) brd (?P<bcast>[^\s]*)", line)
             if match:
                 d = match.groupdict()
-                address = ipaddress.IPv4Interface(d['address'])
-                bcast = ipaddress.IPv4Address(d['bcast'])
+                address = ipaddress.IPv4Interface(d["address"])
+                bcast = ipaddress.IPv4Address(d["bcast"])
                 yield (address, bcast)
 
-            match = re.search('inet (?P<address>[^\s]*)', line)
+            match = re.search("inet (?P<address>[^\s]*)", line)
             if match:
                 d = match.groupdict()
-                address = ipaddress.IPv4Interface(d['address'])
+                address = ipaddress.IPv4Interface(d["address"])
                 yield (address, None)
 
     def add_ipv4_address(self, net_interface, address, broadcast=None):
@@ -83,11 +82,12 @@
                        this net_interfaces subnet.
         """
         if broadcast:
-            self._runner.run('ip addr add %s broadcast %s dev %s' %
-                             (address, broadcast, net_interface))
+            self._runner.run(
+                "ip addr add %s broadcast %s dev %s"
+                % (address, broadcast, net_interface)
+            )
         else:
-            self._runner.run('ip addr add %s dev %s' %
-                             (address, net_interface))
+            self._runner.run("ip addr add %s dev %s" % (address, net_interface))
 
     def remove_ipv4_address(self, net_interface, address, ignore_status=False):
         """Remove an ipv4 address.
@@ -104,8 +104,9 @@
             The job result from a the command
         """
         return self._runner.run(
-            'ip addr del %s dev %s' % (address, net_interface),
-            ignore_status=ignore_status)
+            "ip addr del %s dev %s" % (address, net_interface),
+            ignore_status=ignore_status,
+        )
 
     def set_ipv4_address(self, net_interface, address, broadcast=None):
         """Set the ipv4 address.
@@ -134,23 +135,26 @@
         ip_info = self.get_ipv4_addresses(net_interface)
 
         for address, _ in ip_info:
-            result = self.remove_ipv4_address(net_interface, address,
-                                              ignore_status=True)
+            result = self.remove_ipv4_address(
+                net_interface, address, ignore_status=True
+            )
             # It is possible that the address has already been removed by the
             # time this command has been called. In such a case, we would get
             # this error message.
-            error_msg = 'RTNETLINK answers: Cannot assign requested address'
+            error_msg = "RTNETLINK answers: Cannot assign requested address"
             if result.exit_status != 0:
                 if error_msg in result.stderr:
                     # If it was removed by another process, log a warning
                     if address not in self.get_ipv4_addresses(net_interface):
                         self._runner.log.warning(
-                            'Unable to remove address %s. The address was '
-                            'removed by another process.' % address)
+                            "Unable to remove address %s. The address was "
+                            "removed by another process." % address
+                        )
                         continue
                     # If it was not removed, raise an error
                     self._runner.log.error(
-                        'Unable to remove address %s. The address is still '
-                        'registered to %s, despite call for removal.' %
-                        (address, net_interface))
+                        "Unable to remove address %s. The address is still "
+                        "registered to %s, despite call for removal."
+                        % (address, net_interface)
+                    )
                 raise job.Error(result)
diff --git a/src/antlion/controllers/utils_lib/commands/route.py b/src/antlion/controllers/utils_lib/commands/route.py
index 3897f39..a886455 100644
--- a/src/antlion/controllers/utils_lib/commands/route.py
+++ b/src/antlion/controllers/utils_lib/commands/route.py
@@ -29,7 +29,7 @@
 class LinuxRouteCommand(object):
     """Interface for doing standard ip route commands on a linux system."""
 
-    DEFAULT_ROUTE = 'default'
+    DEFAULT_ROUTE = "default"
 
     def __init__(self, runner):
         """
@@ -39,7 +39,7 @@
         """
         self._runner = runner
 
-    def add_route(self, net_interface, address, proto='static'):
+    def add_route(self, net_interface, address, proto="static"):
         """Add an entry to the ip routing table.
 
         Will add a route for either a specific ip address, or a network.
@@ -61,13 +61,14 @@
             NetworkInterfaceDown: Raised when the network interface is down.
         """
         try:
-            self._runner.run(f'ip route add {address} dev {net_interface} proto {proto}')
+            self._runner.run(
+                f"ip route add {address} dev {net_interface} proto {proto}"
+            )
         except connection.CommandError as e:
-            if 'File exists' in e.result.stderr:
-                raise Error('Route already exists.')
-            if 'Network is down' in e.result.stderr:
-                raise NetworkInterfaceDown(
-                    'Device must be up for adding a route.')
+            if "File exists" in e.result.stderr:
+                raise Error("Route already exists.")
+            if "Network is down" in e.result.stderr:
+                raise NetworkInterfaceDown("Device must be up for adding a route.")
             raise
 
     def get_routes(self, net_interface=None):
@@ -84,8 +85,8 @@
                  it will be a ipaddress.IPv4Network otherwise it is a
                  ipaddress.IPv4Address.
         """
-        result_ipv4 = self._runner.run('ip -4 route show')
-        result_ipv6 = self._runner.run('ip -6 route show')
+        result_ipv4 = self._runner.run("ip -4 route show")
+        result_ipv6 = self._runner.run("ip -6 route show")
 
         lines = result_ipv4.stdout.splitlines() + result_ipv6.stdout.splitlines()
 
@@ -96,22 +97,22 @@
         # 192.168.2.1 dev eth2 proto kernel scope link metric 1
         # fe80::/64 dev wlan0 proto static metric 1024
         for line in lines:
-            if not 'dev' in line:
+            if not "dev" in line:
                 continue
 
             if line.startswith(self.DEFAULT_ROUTE):
                 # The default route entry is formatted differently.
-                match = re.search('dev (?P<net_interface>\S+)', line)
+                match = re.search("dev (?P<net_interface>\S+)", line)
                 pair = None
                 if match:
                     # When there is a match for the route entry pattern create
                     # A pair to hold the info.
-                    pair = (self.DEFAULT_ROUTE,
-                            match.groupdict()['net_interface'])
+                    pair = (self.DEFAULT_ROUTE, match.groupdict()["net_interface"])
             else:
                 # Test the normal route entry pattern.
                 match = re.search(
-                    '(?P<address>[0-9A-Fa-f\.\:/]+) dev (?P<net_interface>\S+)', line)
+                    "(?P<address>[0-9A-Fa-f\.\:/]+) dev (?P<net_interface>\S+)", line
+                )
                 pair = None
                 if match:
                     # When there is a match for the route entry pattern create
@@ -119,11 +120,11 @@
                     d = match.groupdict()
                     # Route can be either a network or specific address
                     try:
-                        address = ipaddress.ip_address(d['address'])
+                        address = ipaddress.ip_address(d["address"])
                     except ValueError:
-                        address = d['address']
+                        address = d["address"]
 
-                    pair = (address, d['net_interface'])
+                    pair = (address, d["net_interface"])
 
             # No pair means no pattern was found.
             if not pair:
@@ -171,12 +172,11 @@
         """
         try:
             if net_interface:
-                self._runner.run('ip route del %s dev %s' %
-                                 (address, net_interface))
+                self._runner.run("ip route del %s dev %s" % (address, net_interface))
             else:
-                self._runner.run('ip route del %s' % address)
+                self._runner.run("ip route del %s" % address)
         except connection.CommandError as e:
-            if 'No such process' in e.result.stderr:
+            if "No such process" in e.result.stderr:
                 # The route didn't exist.
                 return
             raise
diff --git a/src/antlion/controllers/utils_lib/commands/shell.py b/src/antlion/controllers/utils_lib/commands/shell.py
index 34abeeb..ed105af 100644
--- a/src/antlion/controllers/utils_lib/commands/shell.py
+++ b/src/antlion/controllers/utils_lib/commands/shell.py
@@ -57,7 +57,7 @@
             job.Error: When the command executed but had an error.
         """
         if self._working_dir:
-            command_str = 'cd %s; %s' % (self._working_dir, command)
+            command_str = "cd %s; %s" % (self._working_dir, command)
         else:
             command_str = command
 
@@ -80,11 +80,11 @@
         """
         try:
             if isinstance(identifier, str):
-                self.run('ps aux | grep -v grep | grep %s' % identifier)
+                self.run("ps aux | grep -v grep | grep %s" % identifier)
             elif isinstance(identifier, int):
                 self.signal(identifier, 0)
             else:
-                raise ValueError('Bad type was given for identifier')
+                raise ValueError("Bad type was given for identifier")
 
             return True
         except job.Error:
@@ -103,7 +103,7 @@
                   if no pids were found.
         """
         try:
-            result = self.run('ps aux | grep -v grep | grep %s' % identifier)
+            result = self.run("ps aux | grep -v grep | grep %s" % identifier)
         except job.Error as e:
             if e.result.exit_status == 1:
                 # Grep returns exit status 1 when no lines are selected. This is
@@ -136,7 +136,7 @@
             True if the string or pattern was found, False otherwise.
         """
         try:
-            self.run('grep %s %s' % (shlex.quote(search_string), file_name))
+            self.run("grep %s %s" % (shlex.quote(search_string), file_name))
             return True
         except job.Error:
             return False
@@ -150,7 +150,7 @@
         Returns:
             A string of the files contents.
         """
-        return self.run('cat %s' % file_name).stdout
+        return self.run("cat %s" % file_name).stdout
 
     def write_file(self, file_name, data):
         """Writes a block of data to a file through the shell.
@@ -159,7 +159,7 @@
             file_name: The name of the file to write to.
             data: The string of data to write.
         """
-        return self.run('echo %s > %s' % (shlex.quote(data), file_name))
+        return self.run("echo %s > %s" % (shlex.quote(data), file_name))
 
     def append_file(self, file_name, data):
         """Appends a block of data to a file through the shell.
@@ -168,7 +168,7 @@
             file_name: The name of the file to write to.
             data: The string of data to write.
         """
-        return self.run('echo %s >> %s' % (shlex.quote(data), file_name))
+        return self.run("echo %s >> %s" % (shlex.quote(data), file_name))
 
     def touch_file(self, file_name):
         """Creates a file through the shell.
@@ -176,7 +176,7 @@
         Args:
             file_name: The name of the file to create.
         """
-        self.write_file(file_name, '')
+        self.write_file(file_name, "")
 
     def delete_file(self, file_name):
         """Deletes a file through the shell.
@@ -185,9 +185,9 @@
             file_name: The name of the file to delete.
         """
         try:
-            self.run('rm -r %s' % file_name)
+            self.run("rm -r %s" % file_name)
         except job.Error as e:
-            if 'No such file or directory' in e.result.stderr:
+            if "No such file or directory" in e.result.stderr:
                 return
 
             raise
@@ -241,4 +241,4 @@
             job.Error: Raised when the signal fail to reach
                        the specified program.
         """
-        self.run('kill -%d %d' % (sig, pid))
+        self.run("kill -%d %d" % (sig, pid))
diff --git a/src/antlion/controllers/utils_lib/host_utils.py b/src/antlion/controllers/utils_lib/host_utils.py
index db7be27..1b66089 100644
--- a/src/antlion/controllers/utils_lib/host_utils.py
+++ b/src/antlion/controllers/utils_lib/host_utils.py
@@ -43,15 +43,16 @@
     Returns:
         True if the port is available; False otherwise.
     """
-    return (_try_bind(port, socket.SOCK_STREAM, socket.IPPROTO_TCP) and
-            _try_bind(port, socket.SOCK_DGRAM, socket.IPPROTO_UDP))
+    return _try_bind(port, socket.SOCK_STREAM, socket.IPPROTO_TCP) and _try_bind(
+        port, socket.SOCK_DGRAM, socket.IPPROTO_UDP
+    )
 
 
 def _try_bind(port, socket_type, socket_proto):
     s = socket.socket(socket.AF_INET, socket_type, socket_proto)
     try:
         try:
-            s.bind(('', port))
+            s.bind(("", port))
             # The result of getsockname() is protocol dependent, but for both
             # IPv4 and IPv6 the second field is a port number.
             return s.getsockname()[1]
diff --git a/src/antlion/controllers/utils_lib/ssh/connection.py b/src/antlion/controllers/utils_lib/ssh/connection.py
index 799905e..23c80d7 100644
--- a/src/antlion/controllers/utils_lib/ssh/connection.py
+++ b/src/antlion/controllers/utils_lib/ssh/connection.py
@@ -37,6 +37,7 @@
     Attributes:
         result: The results of the ssh command that had the error.
     """
+
     def __init__(self, result):
         """
         Args:
@@ -45,12 +46,14 @@
         self.result = result
 
     def __str__(self):
-        return 'cmd: %s\nstdout: %s\nstderr: %s' % (
-            self.result.command, self.result.stdout, self.result.stderr)
+        return "cmd: %s\nstdout: %s\nstderr: %s" % (
+            self.result.command,
+            self.result.stdout,
+            self.result.stderr,
+        )
 
 
-_Tunnel = collections.namedtuple('_Tunnel',
-                                 ['local_port', 'remote_port', 'proc'])
+_Tunnel = collections.namedtuple("_Tunnel", ["local_port", "remote_port", "proc"])
 
 
 class SshConnection(object):
@@ -61,10 +64,11 @@
     a command is run. If the persistent connection fails it will attempt
     to connect normally.
     """
+
     @property
     def socket_path(self):
         """Returns: The os path to the master socket file."""
-        return os.path.join(self._master_ssh_tempdir, 'socket')
+        return os.path.join(self._master_ssh_tempdir, "socket")
 
     def __init__(self, settings):
         """
@@ -81,7 +85,7 @@
         self._tunnels = list()
 
         def log_line(msg):
-            return '[SshConnection | %s] %s' % (self._settings.hostname, msg)
+            return "[SshConnection | %s] %s" % (self._settings.hostname, msg)
 
         self.log = logger.create_logger(log_line)
 
@@ -110,34 +114,35 @@
         with self._lock:
             if self._master_ssh_proc is not None:
                 socket_path = self.socket_path
-                if (not os.path.exists(socket_path)
-                        or self._master_ssh_proc.poll() is not None):
-                    self.log.debug('Master ssh connection to %s is down.',
-                                   self._settings.hostname)
+                if (
+                    not os.path.exists(socket_path)
+                    or self._master_ssh_proc.poll() is not None
+                ):
+                    self.log.debug(
+                        "Master ssh connection to %s is down.", self._settings.hostname
+                    )
                     self._cleanup_master_ssh()
 
             if self._master_ssh_proc is None:
                 # Create a shared socket in a temp location.
-                self._master_ssh_tempdir = tempfile.mkdtemp(
-                    prefix='ssh-master')
+                self._master_ssh_tempdir = tempfile.mkdtemp(prefix="ssh-master")
 
                 # Setup flags and options for running the master ssh
                 # -N: Do not execute a remote command.
                 # ControlMaster: Spawn a master connection.
                 # ControlPath: The master connection socket path.
-                extra_flags = {'-N': None}
+                extra_flags = {"-N": None}
                 extra_options = {
-                    'ControlMaster': True,
-                    'ControlPath': self.socket_path,
-                    'BatchMode': True
+                    "ControlMaster": True,
+                    "ControlPath": self.socket_path,
+                    "BatchMode": True,
                 }
 
                 # Construct the command and start it.
                 master_cmd = self._formatter.format_ssh_local_command(
-                    self._settings,
-                    extra_flags=extra_flags,
-                    extra_options=extra_options)
-                self.log.info('Starting master ssh connection.')
+                    self._settings, extra_flags=extra_flags, extra_options=extra_options
+                )
+                self.log.info("Starting master ssh connection.")
                 self._master_ssh_proc = job.run_async(master_cmd)
 
                 end_time = time.time() + timeout_seconds
@@ -145,18 +150,20 @@
                 while time.time() < end_time:
                     if os.path.exists(self.socket_path):
                         break
-                    time.sleep(.2)
+                    time.sleep(0.2)
                 else:
                     self._cleanup_master_ssh()
-                    raise Error('Master ssh connection timed out.')
+                    raise Error("Master ssh connection timed out.")
 
-    def run(self,
-            command,
-            timeout=60,
-            ignore_status=False,
-            env=None,
-            io_encoding='utf-8',
-            attempts=2):
+    def run(
+        self,
+        command,
+        timeout=60,
+        ignore_status=False,
+        env=None,
+        io_encoding="utf-8",
+        attempts=2,
+    ):
         """Runs a remote command over ssh.
 
         Will ssh to a remote host and run a command. This method will
@@ -189,93 +196,98 @@
         try:
             self.setup_master_ssh(self._settings.connect_timeout)
         except Error:
-            self.log.warning('Failed to create master ssh connection, using '
-                             'normal ssh connection.')
+            self.log.warning(
+                "Failed to create master ssh connection, using "
+                "normal ssh connection."
+            )
 
-        extra_options = {'BatchMode': True}
+        extra_options = {"BatchMode": True}
         if self._master_ssh_proc:
-            extra_options['ControlPath'] = self.socket_path
+            extra_options["ControlPath"] = self.socket_path
 
         identifier = str(uuid.uuid4())
         full_command = 'echo "CONNECTED: %s"; %s' % (identifier, command)
 
         terminal_command = self._formatter.format_command(
-            full_command, env, self._settings, extra_options=extra_options)
+            full_command, env, self._settings, extra_options=extra_options
+        )
 
         dns_retry_count = 2
         while True:
-            result = job.run(terminal_command,
-                             ignore_status=True,
-                             timeout=timeout,
-                             io_encoding=io_encoding)
+            result = job.run(
+                terminal_command,
+                ignore_status=True,
+                timeout=timeout,
+                io_encoding=io_encoding,
+            )
             output = result.stdout
 
             # Check for a connected message to prevent false negatives.
-            valid_connection = re.search('^CONNECTED: %s' % identifier,
-                                         output,
-                                         flags=re.MULTILINE)
+            valid_connection = re.search(
+                "^CONNECTED: %s" % identifier, output, flags=re.MULTILINE
+            )
             if valid_connection:
                 # Remove the first line that contains the connect message.
-                line_index = output.find('\n') + 1
+                line_index = output.find("\n") + 1
                 if line_index == 0:
                     line_index = len(output)
                 real_output = output[line_index:].encode(io_encoding)
 
-                result = job.Result(command=result.command,
-                                    stdout=real_output,
-                                    stderr=result._raw_stderr,
-                                    exit_status=result.exit_status,
-                                    duration=result.duration,
-                                    did_timeout=result.did_timeout,
-                                    encoding=io_encoding)
+                result = job.Result(
+                    command=result.command,
+                    stdout=real_output,
+                    stderr=result._raw_stderr,
+                    exit_status=result.exit_status,
+                    duration=result.duration,
+                    did_timeout=result.did_timeout,
+                    encoding=io_encoding,
+                )
                 if result.exit_status and not ignore_status:
                     raise job.Error(result)
                 return result
 
             error_string = result.stderr
 
-            had_dns_failure = (result.exit_status == 255 and re.search(
-                r'^ssh: .*: Name or service not known',
-                error_string,
-                flags=re.MULTILINE))
+            had_dns_failure = result.exit_status == 255 and re.search(
+                r"^ssh: .*: Name or service not known", error_string, flags=re.MULTILINE
+            )
             if had_dns_failure:
                 dns_retry_count -= 1
                 if not dns_retry_count:
-                    raise Error('DNS failed to find host.', result)
-                self.log.debug('Failed to connect to host, retrying...')
+                    raise Error("DNS failed to find host.", result)
+                self.log.debug("Failed to connect to host, retrying...")
             else:
                 break
 
         had_timeout = re.search(
-            r'^ssh: connect to host .* port .*: '
-            r'Connection timed out\r$',
+            r"^ssh: connect to host .* port .*: " r"Connection timed out\r$",
             error_string,
-            flags=re.MULTILINE)
+            flags=re.MULTILINE,
+        )
         if had_timeout:
-            raise Error('Ssh timed out.', result)
+            raise Error("Ssh timed out.", result)
 
-        permission_denied = 'Permission denied' in error_string
+        permission_denied = "Permission denied" in error_string
         if permission_denied:
-            raise Error('Permission denied.', result)
+            raise Error("Permission denied.", result)
 
         unknown_host = re.search(
-            r'ssh: Could not resolve hostname .*: '
-            r'Name or service not known',
+            r"ssh: Could not resolve hostname .*: " r"Name or service not known",
             error_string,
-            flags=re.MULTILINE)
+            flags=re.MULTILINE,
+        )
         if unknown_host:
-            raise Error('Unknown host.', result)
+            raise Error("Unknown host.", result)
 
-        self.log.error('An unknown error has occurred. Job result: %s' %
-                       result)
-        ping_output = job.run('ping %s -c 3 -w 1' % self._settings.hostname,
-                              ignore_status=True)
-        self.log.error('Ping result: %s' % ping_output)
+        self.log.error("An unknown error has occurred. Job result: %s" % result)
+        ping_output = job.run(
+            "ping %s -c 3 -w 1" % self._settings.hostname, ignore_status=True
+        )
+        self.log.error("Ping result: %s" % ping_output)
         if attempts > 1:
             self._cleanup_master_ssh()
-            self.run(command, timeout, ignore_status, env, io_encoding,
-                     attempts - 1)
-        raise Error('The job failed for unknown reasons.', result)
+            self.run(command, timeout, ignore_status, env, io_encoding, attempts - 1)
+        raise Error("The job failed for unknown reasons.", result)
 
     def run_async(self, command, env=None):
         """Starts up a background command over ssh.
@@ -298,7 +310,9 @@
             SshPermissionDeniedError: When permission is not allowed on the
                                       remote host.
         """
-        return self.run(f'({command}) < /dev/null > /dev/null 2>&1 & echo -n $!', env=env)
+        return self.run(
+            f"({command}) < /dev/null > /dev/null 2>&1 & echo -n $!", env=env
+        )
 
     def close(self):
         """Clean up open connections to remote host."""
@@ -313,14 +327,14 @@
         """
         # If a master SSH connection is running, kill it.
         if self._master_ssh_proc is not None:
-            self.log.debug('Nuking master_ssh_job.')
+            self.log.debug("Nuking master_ssh_job.")
             self._master_ssh_proc.kill()
             self._master_ssh_proc.wait()
             self._master_ssh_proc = None
 
         # Remove the temporary directory for the master SSH socket.
         if self._master_ssh_tempdir is not None:
-            self.log.debug('Cleaning master_ssh_tempdir.')
+            self.log.debug("Cleaning master_ssh_tempdir.")
             shutil.rmtree(self._master_ssh_tempdir)
             self._master_ssh_tempdir = None
 
@@ -346,24 +360,27 @@
                     return tunnel.local_port
 
         extra_flags = {
-            '-n': None,  # Read from /dev/null for stdin
-            '-N': None,  # Do not execute a remote command
-            '-q': None,  # Suppress warnings and diagnostic commands
-            '-L': '%d:localhost:%d' % (local_port, port),
+            "-n": None,  # Read from /dev/null for stdin
+            "-N": None,  # Do not execute a remote command
+            "-q": None,  # Suppress warnings and diagnostic commands
+            "-L": "%d:localhost:%d" % (local_port, port),
         }
         extra_options = dict()
         if self._master_ssh_proc:
-            extra_options['ControlPath'] = self.socket_path
+            extra_options["ControlPath"] = self.socket_path
         tunnel_cmd = self._formatter.format_ssh_local_command(
-            self._settings,
-            extra_flags=extra_flags,
-            extra_options=extra_options)
-        self.log.debug('Full tunnel command: %s', tunnel_cmd)
+            self._settings, extra_flags=extra_flags, extra_options=extra_options
+        )
+        self.log.debug("Full tunnel command: %s", tunnel_cmd)
         # Exec the ssh process directly so that when we deliver signals, we
         # deliver them straight to the child process.
         tunnel_proc = job.run_async(tunnel_cmd)
-        self.log.debug('Started ssh tunnel, local = %d remote = %d, pid = %d',
-                       local_port, port, tunnel_proc.pid)
+        self.log.debug(
+            "Started ssh tunnel, local = %d remote = %d, pid = %d",
+            local_port,
+            port,
+            tunnel_proc.pid,
+        )
         self._tunnels.append(_Tunnel(local_port, port, tunnel_proc))
         return local_port
 
@@ -400,8 +417,10 @@
         """
         # TODO: This may belong somewhere else: b/32572515
         user_host = self._formatter.format_host_name(self._settings)
-        job.run('scp %s %s:%s' % (local_path, user_host, remote_path),
-                ignore_status=ignore_status)
+        job.run(
+            "scp %s %s:%s" % (local_path, user_host, remote_path),
+            ignore_status=ignore_status,
+        )
 
     def pull_file(self, local_path, remote_path, ignore_status=False):
         """Send a file from remote host to local host
@@ -412,10 +431,12 @@
             ignore_status: Whether or not to ignore the command's exit_status.
         """
         user_host = self._formatter.format_host_name(self._settings)
-        job.run('scp %s:%s %s' % (user_host, remote_path, local_path),
-                ignore_status=ignore_status)
+        job.run(
+            "scp %s:%s %s" % (user_host, remote_path, local_path),
+            ignore_status=ignore_status,
+        )
 
-    def find_free_port(self, interface_name='localhost'):
+    def find_free_port(self, interface_name="localhost"):
         """Find a unused port on the remote host.
 
         Note that this method is inherently racy, since it is impossible
@@ -431,7 +452,7 @@
         # TODO: This may belong somewhere else: b/3257251
         free_port_cmd = (
             'python -c "import socket; s=socket.socket(); '
-            's.bind((\'%s\', 0)); print(s.getsockname()[1]); s.close()"'
+            "s.bind(('%s', 0)); print(s.getsockname()[1]); s.close()\""
         ) % interface_name
         port = int(self.run(free_port_cmd).stdout)
         # Yield to the os to ensure the port gets cleaned up.
diff --git a/src/antlion/controllers/utils_lib/ssh/formatter.py b/src/antlion/controllers/utils_lib/ssh/formatter.py
index 2466012..41450ca 100644
--- a/src/antlion/controllers/utils_lib/ssh/formatter.py
+++ b/src/antlion/controllers/utils_lib/ssh/formatter.py
@@ -43,7 +43,7 @@
         Returns:
             A string of the connection host name to connect to.
         """
-        return '%s@%s' % (settings.username, settings.hostname)
+        return "%s@%s" % (settings.username, settings.hostname)
 
     def format_value(self, value):
         """Formats a command line value.
@@ -58,7 +58,7 @@
             A string representation of the formatted value.
         """
         if isinstance(value, bool):
-            return 'yes' if value else 'no'
+            return "yes" if value else "no"
 
         return str(value)
 
@@ -77,8 +77,8 @@
         for option_name in options:
             option = options[option_name]
 
-            yield '-o'
-            yield '%s=%s' % (option_name, self.format_value(option))
+            yield "-o"
+            yield "%s=%s" % (option_name, self.format_value(option))
 
     def format_flag_list(self, flags):
         """Format the flags list.
@@ -99,10 +99,7 @@
             if flag is not None:
                 yield self.format_value(flag)
 
-    def format_ssh_local_command(self,
-                                 settings,
-                                 extra_flags={},
-                                 extra_options={}):
+    def format_ssh_local_command(self, settings, extra_flags={}, extra_options={}):
         """Formats the local part of the ssh command.
 
         Formats the local section of the ssh command. This is the part of the
@@ -136,11 +133,9 @@
 
         return base_command
 
-    def format_ssh_command(self,
-                           remote_command,
-                           settings,
-                           extra_flags={},
-                           extra_options={}):
+    def format_ssh_command(
+        self, remote_command, settings, extra_flags={}, extra_options={}
+    ):
         """Formats the full ssh command.
 
         Creates the full format for an ssh command.
@@ -155,8 +150,9 @@
         Returns:
             A list of strings that make up the total ssh command.
         """
-        local_command = self.format_ssh_local_command(settings, extra_flags,
-                                                      extra_options)
+        local_command = self.format_ssh_local_command(
+            settings, extra_flags, extra_options
+        )
 
         local_command.append(remote_command)
         return local_command
@@ -175,23 +171,18 @@
             machine.
         """
         if not env:
-            env_str = ''
+            env_str = ""
         else:
-            env_str = 'export '
+            env_str = "export "
             for name in env:
                 value = env[name]
-                env_str += '%s=%s ' % (name, str(value))
-            env_str += ';'
+                env_str += "%s=%s " % (name, str(value))
+            env_str += ";"
 
-        execution_line = '%s %s;' % (env_str, command)
+        execution_line = "%s %s;" % (env_str, command)
         return execution_line
 
-    def format_command(self,
-                       command,
-                       env,
-                       settings,
-                       extra_flags={},
-                       extra_options={}):
+    def format_command(self, command, env, settings, extra_flags={}, extra_options={}):
         """Formats a full command.
 
         Formats the full command to run in order to run a command on a remote
@@ -206,5 +197,6 @@
             extra_options: Extra options to include with the settings.
         """
         remote_command = self.format_remote_command(command, env)
-        return self.format_ssh_command(remote_command, settings, extra_flags,
-                                       extra_options)
+        return self.format_ssh_command(
+            remote_command, settings, extra_flags, extra_options
+        )
diff --git a/src/antlion/controllers/utils_lib/ssh/settings.py b/src/antlion/controllers/utils_lib/ssh/settings.py
index ca14b91..ead5844 100644
--- a/src/antlion/controllers/utils_lib/ssh/settings.py
+++ b/src/antlion/controllers/utils_lib/ssh/settings.py
@@ -20,23 +20,7 @@
     An instance of SshSettings or None
 """
 
-
-def from_config(config):
-    if config is None:
-        return None  # Having no settings is not an error
-
-    user = config.get('user', None)
-    host = config.get('host', None)
-    port = config.get('port', 22)
-    identity_file = config.get('identity_file', None)
-    ssh_config = config.get('ssh_config', None)
-    connect_timeout = config.get('connect_timeout', 30)
-    if user is None or host is None:
-        raise ValueError('Malformed SSH config did not include user and '
-                         'host keys: %s' % config)
-
-    return SshSettings(host, user, port=port, identity_file=identity_file,
-                       ssh_config=ssh_config, connect_timeout=connect_timeout)
+from typing import Dict, Optional, Union
 
 
 class SshSettings(object):
@@ -56,16 +40,18 @@
                         connection alive.
     """
 
-    def __init__(self,
-                 hostname,
-                 username,
-                 port=22,
-                 host_file='/dev/null',
-                 connect_timeout=30,
-                 alive_interval=300,
-                 executable='/usr/bin/ssh',
-                 identity_file=None,
-                 ssh_config=None):
+    def __init__(
+        self,
+        hostname: str,
+        username: str,
+        port: int = 22,
+        host_file: str = "/dev/null",
+        connect_timeout: int = 30,
+        alive_interval: int = 300,
+        executable: str = "/usr/bin/ssh",
+        identity_file: Optional[str] = None,
+        ssh_config: Optional[str] = None,
+    ):
         self.username = username
         self.hostname = hostname
         self.executable = executable
@@ -76,7 +62,7 @@
         self.identity_file = identity_file
         self.ssh_config = ssh_config
 
-    def construct_ssh_options(self):
+    def construct_ssh_options(self) -> Dict[str, Union[str, int, bool]]:
         """Construct the ssh options.
 
         Constructs a dictionary of option that should be used with the ssh
@@ -86,27 +72,54 @@
             A dictionary of option name to value.
         """
         current_options = {}
-        current_options['StrictHostKeyChecking'] = False
-        current_options['UserKnownHostsFile'] = self.host_file
-        current_options['ConnectTimeout'] = self.connect_timeout
-        current_options['ServerAliveInterval'] = self.alive_interval
+        current_options["StrictHostKeyChecking"] = False
+        current_options["UserKnownHostsFile"] = self.host_file
+        current_options["ConnectTimeout"] = self.connect_timeout
+        current_options["ServerAliveInterval"] = self.alive_interval
         return current_options
 
-    def construct_ssh_flags(self):
+    def construct_ssh_flags(self) -> Dict[str, Union[None, str, int]]:
         """Construct the ssh flags.
 
         Constructs what flags should be used in the ssh connection.
 
         Returns:
-            A dictonary of flag name to value. If value is none then it is
+            A dictionary of flag name to value. If value is none then it is
             treated as a binary flag.
         """
         current_flags = {}
-        current_flags['-a'] = None
-        current_flags['-x'] = None
-        current_flags['-p'] = self.port
+        current_flags["-a"] = None
+        current_flags["-x"] = None
+        current_flags["-p"] = self.port
         if self.identity_file:
-            current_flags['-i'] = self.identity_file
+            current_flags["-i"] = self.identity_file
         if self.ssh_config:
-            current_flags['-F'] = self.ssh_config
+            current_flags["-F"] = self.ssh_config
         return current_flags
+
+
+def from_config(config: Optional[Dict[str, Union[str, int]]]) -> Optional[SshSettings]:
+    if config is None:
+        return None  # Having no settings is not an error
+
+    ssh_binary_path = config.get("ssh_binary_path", "/usr/bin/ssh")
+    user = config.get("user", None)
+    host = config.get("host", None)
+    port = config.get("port", 22)
+    identity_file = config.get("identity_file", None)
+    ssh_config = config.get("ssh_config", None)
+    connect_timeout = config.get("connect_timeout", 30)
+    if user is None or host is None:
+        raise ValueError(
+            f"Malformed SSH config did not include user and host keys: {config}"
+        )
+
+    return SshSettings(
+        host,
+        user,
+        port=port,
+        identity_file=identity_file,
+        ssh_config=ssh_config,
+        connect_timeout=connect_timeout,
+        executable=ssh_binary_path,
+    )
diff --git a/src/antlion/controllers/uxm_lib/OWNERS b/src/antlion/controllers/uxm_lib/OWNERS
deleted file mode 100644
index 0c40622..0000000
--- a/src/antlion/controllers/uxm_lib/OWNERS
+++ /dev/null
@@ -1,3 +0,0 @@
-jethier@google.com
-hmtuan@google.com
-harjani@google.com
\ No newline at end of file
diff --git a/src/antlion/controllers/uxm_lib/uxm_cellular_simulator.py b/src/antlion/controllers/uxm_lib/uxm_cellular_simulator.py
deleted file mode 100644
index 713d3cf..0000000
--- a/src/antlion/controllers/uxm_lib/uxm_cellular_simulator.py
+++ /dev/null
@@ -1,707 +0,0 @@
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import os
-import socket
-import time
-import paramiko
-import re
-
-from antlion.controllers.cellular_simulator import AbstractCellularSimulator
-
-
-class UXMCellularSimulator(AbstractCellularSimulator):
-    """A cellular simulator for UXM callbox."""
-
-    # Keys to obtain data from cell_info dictionary.
-    KEY_CELL_NUMBER = "cell_number"
-    KEY_CELL_TYPE = "cell_type"
-
-    # UXM socket port
-    UXM_PORT = 5125
-
-    # UXM SCPI COMMAND
-    SCPI_IMPORT_STATUS_QUERY_CMD = 'SYSTem:SCPI:IMPort:STATus?'
-    SCPI_SYSTEM_ERROR_CHECK_CMD = 'SYST:ERR?\n'
-    # require: path to SCPI file
-    SCPI_IMPORT_SCPI_FILE_CMD = 'SYSTem:SCPI:IMPort "{}"\n'
-    # require: 1. cell type (E.g. NR5G), 2. cell number (E.g CELL1)
-    SCPI_CELL_ON_CMD = 'BSE:CONFig:{}:{}:ACTive 1'
-    # require: 1. cell type (E.g. NR5G), 2. cell number (E.g CELL1)
-    SCPI_CELL_OFF_CMD = 'BSE:CONFig:{}:{}:ACTive 0'
-    # require: 1. cell type (E.g. NR5G), 2. cell number (E.g CELL1)
-    SCPI_GET_CELL_STATUS = 'BSE:STATus:{}:{}?'
-    SCPI_CHECK_CONNECTION_CMD = '*IDN?\n'
-
-    # UXM's Test Application recovery
-    TA_BOOT_TIME = 100
-
-    # shh command
-    SSH_START_GUI_APP_CMD_FORMAT = 'psexec -s -d -i 1 "{exe_path}"'
-    SSH_CHECK_APP_RUNNING_CMD_FORMAT = 'tasklist | findstr /R {regex_app_name}'
-
-    # start process success regex
-    PSEXEC_PROC_STARTED_REGEX_FORMAT = 'started on * with process ID {proc_id}'
-
-    def __init__(self, ip_address, custom_files, uxm_user,
-                 ssh_private_key_to_uxm, ta_exe_path, ta_exe_name):
-        """Initializes the cellular simulator.
-
-        Args:
-            ip_address: the ip address of host where Keysight Test Application (TA)
-                is installed.
-            custom_files: a list of file path for custom files.
-            uxm_user: username of host where Keysight TA resides.
-            ssh_private_key_to_uxm: private key for key based ssh to
-                host where Keysight TA resides.
-            ta_exe_path: path to TA exe.
-            ta_exe_name: name of TA exe.
-        """
-        super().__init__()
-        self.custom_files = custom_files
-        self.rockbottom_script = None
-        self.cells = []
-        self.uxm_ip = ip_address
-        self.uxm_user = uxm_user
-        self.ssh_private_key_to_uxm = ssh_private_key_to_uxm
-        self.ta_exe_path = ta_exe_path
-        self.ta_exe_name = ta_exe_name
-        self.ssh_client = self._create_ssh_client()
-
-        # get roclbottom file
-        for file in self.custom_files:
-            if 'rockbottom_' in file:
-                self.rockbottom_script = file
-
-        # connect to Keysight Test Application via socket
-        self.recovery_ta()
-        self.socket = self._socket_connect(self.uxm_ip, self.UXM_PORT)
-        self.check_socket_connection()
-        self.timeout = 120
-
-    def _create_ssh_client(self):
-        """Create a ssh client to host."""
-        ssh = paramiko.SSHClient()
-        ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
-        mykey = paramiko.Ed25519Key.from_private_key_file(
-            self.ssh_private_key_to_uxm)
-        ssh.connect(hostname=self.uxm_ip, username=self.uxm_user, pkey=mykey)
-        self.log.info('SSH client to %s is connected' % self.uxm_ip)
-        return ssh
-
-    def is_ta_running(self):
-        is_running_cmd = self.SSH_CHECK_APP_RUNNING_CMD_FORMAT.format(
-            regex_app_name=self.ta_exe_name)
-        stdin, stdout, stderr = self.ssh_client.exec_command(is_running_cmd)
-        stdin.close()
-        err = ''.join(stderr.readlines())
-        out = ''.join(stdout.readlines())
-        final_output = str(out) + str(err)
-        self.log.info(final_output)
-        return (out != '' and err == '')
-
-    def _start_test_app(self):
-        """Start Test Application on Windows."""
-        # start GUI exe via ssh
-        start_app_cmd = self.SSH_START_GUI_APP_CMD_FORMAT.format(
-            exe_path=self.ta_exe_path)
-        stdin, stdout, stderr = self.ssh_client.exec_command(start_app_cmd)
-        self.log.info(f'Command sent to {self.uxm_ip}: {start_app_cmd}')
-        stdin.close()
-        err = ''.join(stderr.readlines())
-        out = ''.join(stdout.readlines())
-        # psexec return process ID as part of the exit code
-        exit_status = stderr.channel.recv_exit_status()
-        is_started = re.search(
-            self.PSEXEC_PROC_STARTED_REGEX_FORMAT.format(proc_id=exit_status),
-            err[-1])
-        if is_started:
-            raise RuntimeError('Fail to start TA: ' + out + err)
-        # wait for ta completely boot up
-        self.log.info('TA is starting')
-        time.sleep(self.TA_BOOT_TIME)
-
-    def recovery_ta(self):
-        """Start TA if it is not running."""
-        if not self.is_ta_running():
-            self._start_test_app()
-            # checking if ta booting process complete
-            # by checking socket connection
-            s = None
-            retries = 12
-            for _ in range(retries):
-                try:
-                    s = self._socket_connect(self.uxm_ip, self.UXM_PORT)
-                    s.close()
-                    return
-                except ConnectionRefusedError as cre:
-                    self.log.info(
-                        'Connection refused, wait 10s for TA to boot')
-                    time.sleep(10)
-            raise RuntimeError('TA does not start on time')
-
-    def set_rockbottom_script_path(self, path):
-        """Set path to rockbottom script.
-
-        Args:
-            path: path to rockbottom script.
-        """
-        self.rockbottom_script = path
-
-    def set_cell_info(self, cell_info):
-        """Set type and number for multiple cells.
-
-        Args:
-            cell_info: list of dictionaries,
-                each dictionary contain cell type
-                and cell number for each cell
-                that the simulator need to control.
-        """
-        if not cell_info:
-            raise ValueError('Missing cell info from configurations file')
-        self.cells = cell_info
-
-    def turn_cell_on(self, cell_type, cell_number):
-        """Turn UXM's cell on.
-
-        Args:
-            cell_type: type of cell (e.g NR5G, LTE).
-            cell_number: ordinal number of a cell.
-        """
-        if cell_type and cell_number:
-            self._socket_send_SCPI_command(
-                self.SCPI_CELL_ON_CMD.format(cell_type, cell_number))
-        else:
-            raise ValueError('Invalid cell info\n' +
-                             f' cell type: {cell_type}\n' +
-                             f' cell number: {cell_number}\n')
-
-    def turn_cell_off(self, cell_type, cell_number):
-        """Turn UXM's cell off.
-
-        Args:
-            cell_type: type of cell (e.g NR5G, LTE).
-            cell_number: ordinal number of a cell.
-        """
-        if cell_type and cell_number:
-            self._socket_send_SCPI_command(
-                self.SCPI_CELL_OFF_CMD.format(cell_type, cell_number))
-        else:
-            raise ValueError('Invalid cell info\n' +
-                             f' cell type: {cell_type}\n' +
-                             f' cell number: {cell_number}\n')
-
-    def get_cell_status(self, cell_type, cell_number):
-        """Get status of cell.
-
-        Args:
-            cell_type: type of cell (e.g NR5G, LTE).
-            cell_number: ordinal number of a cell.
-        """
-        if not cell_type or not cell_number:
-            raise ValueError('Invalid cell with\n' +
-                             f' cell type: {cell_type}\n' +
-                             f' cell number: {cell_number}\n')
-
-        return self._socket_send_SCPI_for_result_command(
-            self.SCPI_GET_CELL_STATUS.format(cell_type, cell_number))
-
-    def check_socket_connection(self):
-        """Check if the socket connection is established.
-
-        Query the identification of the Keysight Test Application
-        we are trying to connect to. Empty response indicates
-        connection fail, and vice versa.
-        """
-        self.socket.sendall(self.SCPI_CHECK_CONNECTION_CMD.encode())
-        response = self.socket.recv(1024).decode()
-        if response:
-            self.log.info(f'Connected to: {response}')
-        else:
-            self.log.error('Fail to connect to callbox')
-
-    def _socket_connect(self, host, port):
-        """Create socket connection.
-
-        Args:
-            host: IP address of desktop where Keysight Test Application resides.
-            port: port that Keysight Test Application is listening for socket
-                communication.
-        Return:
-            s: socket object.
-        """
-        self.log.info('Establishing connection to callbox via socket')
-        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
-        s.connect((host, port))
-        return s
-
-    def _socket_send_SCPI_command(self, command):
-        """Send SCPI command without expecting response.
-
-        Args:
-            command: a string SCPI command.
-        """
-        # make sure there is a line break for the socket to send command
-        command = command + '\n'
-        # send command
-        self.socket.sendall(command.encode())
-        self.log.info(f'Sent {command}')
-
-    def _socket_receive_SCPI_result(self):
-        """Receive response from socket. """
-        i = 1
-        response = ''
-        while i < self.timeout and not response:
-            response = self.socket.recv(1024).decode()
-            i += 1
-        return response
-
-    def _socket_send_SCPI_for_result_command(self, command):
-        """Send SCPI command and expecting response.
-
-        Args:
-            command: a string SCPI command.
-        """
-        self._socket_send_SCPI_command(command)
-        response = self._socket_receive_SCPI_result()
-        return response
-
-    def check_system_error(self):
-        """Query system error from Keysight Test Application.
-
-        Return:
-            status: a message indicate the number of errors
-                and detail of errors if any.
-                a string `0,"No error"` indicates no error.
-        """
-        status = self._socket_send_SCPI_for_result_command(
-            self.SCPI_SYSTEM_ERROR_CHECK_CMD)
-        self.log.info(f'System error status: {status}')
-        return status
-
-    def import_configuration(self, path):
-        """Import SCPI config file.
-
-        Args:
-            path: path to SCPI file.
-        """
-        self._socket_send_SCPI_command(
-            self.SCPI_IMPORT_SCPI_FILE_CMD.format(path))
-        time.sleep(45)
-
-    def destroy(self):
-        """Close socket connection with UXM. """
-        self.socket.close()
-
-    def setup_lte_scenario(self, path):
-        """Configures the equipment for an LTE simulation.
-
-        Args:
-            path: path to SCPI config file.
-        """
-        self.import_configuration(path)
-
-    def dut_rockbottom(self, dut):
-        """Set the dut to rockbottom state.
-
-        Args:
-            dut: a CellularAndroid controller.
-        """
-        # The rockbottom script might include a device reboot, so it is
-        # necessary to stop SL4A during its execution.
-        dut.ad.stop_services()
-        self.log.info('Executing rockbottom script for ' + dut.ad.model)
-        os.chmod(self.rockbottom_script, 0o777)
-        os.system('{} {}'.format(self.rockbottom_script, dut.ad.serial))
-        # Make sure the DUT is in root mode after coming back
-        dut.ad.root_adb()
-        # Restart SL4A
-        dut.ad.start_services()
-
-    def wait_until_attached_one_cell(self,
-                                     cell_type,
-                                     cell_number,
-                                     dut,
-                                     wait_for_camp_interval,
-                                     attach_retries,
-                                     change_dut_setting_allow=True):
-        """Wait until connect to given UXM cell.
-
-        After turn off airplane mode, sleep for
-        wait_for_camp_interval seconds for device to camp.
-        If not device is not connected after the wait,
-        either toggle airplane mode on/off or reboot device.
-        Args:
-            cell_type: type of cell
-                which we are trying to connect to.
-            cell_number: ordinal number of a cell
-                which we are trying to connect to.
-            dut: a CellularAndroid controller.
-            wait_for_camp_interval: sleep interval,
-                wait for device to camp.
-            attach_retries: number of retry
-                to wait for device
-                to connect to 1 basestation.
-            change_dut_setting_allow: turn on/off APM
-                or reboot device helps with device camp time.
-                However, if we are trying to connect to second cell
-                changing APM status or reboot is not allowed.
-        Raise:
-            AbstractCellularSimulator.CellularSimulatorError:
-                device unable to connect to cell.
-        """
-        # airplane mode off
-        # dut.ad.adb.shell('settings put secure adaptive_connectivity_enabled 0')
-        dut.toggle_airplane_mode(False)
-        time.sleep(5)
-        # turn cell on
-        self.turn_cell_on(cell_type, cell_number)
-        time.sleep(5)
-
-        # waits for connect
-        for index in range(1, attach_retries):
-            # airplane mode on
-            time.sleep(wait_for_camp_interval)
-            cell_state = self.get_cell_status(cell_type, cell_number)
-            self.log.info(f'cell state: {cell_state}')
-            if cell_state == 'CONN\n':
-                return True
-            if cell_state == 'OFF\n':
-                self.turn_cell_on(cell_type, cell_number)
-                time.sleep(5)
-            if change_dut_setting_allow:
-                if (index % 4) == 0:
-                    dut.ad.reboot()
-                    if self.rockbottom_script:
-                        self.dut_rockbottom(dut)
-                    else:
-                        self.log.warning(
-                            f'Rockbottom script {self} was not executed after reboot'
-                        )
-                else:
-                    # airplane mode on
-                    dut.toggle_airplane_mode(True)
-                    time.sleep(5)
-                    # airplane mode off
-                    dut.toggle_airplane_mode(False)
-
-        # Phone cannot connected to basestation of callbox
-        raise RuntimeError(
-            f'Phone was unable to connect to cell: {cell_type}-{cell_number}')
-
-    def wait_until_attached(self, dut, timeout, attach_retries):
-        """Waits until the DUT is attached to all required cells.
-
-        Args:
-            dut: a CellularAndroid controller.
-            timeout: sleep interval,
-                wait for device to camp in 1 try.
-            attach_retries: number of retry
-                to wait for device
-                to connect to 1 basestation.
-        """
-        # get cell info
-        first_cell_type = self.cells[0][self.KEY_CELL_TYPE]
-        first_cell_number = self.cells[0][self.KEY_CELL_NUMBER]
-        if len(self.cells) == 2:
-            second_cell_type = self.cells[1][self.KEY_CELL_TYPE]
-            second_cell_number = self.cells[1][self.KEY_CELL_NUMBER]
-
-        # connect to 1st cell
-        try:
-            self.wait_until_attached_one_cell(first_cell_type,
-                                              first_cell_number, dut, timeout,
-                                              attach_retries)
-        except Exception as exc:
-            raise RuntimeError(f'Cannot connect to first cell') from exc
-
-        # connect to 2nd cell
-        if len(self.cells) == 2:
-            self.turn_cell_on(
-                second_cell_type,
-                second_cell_number,
-            )
-            self._socket_send_SCPI_command(
-                'BSE:CONFig:LTE:CELL1:CAGGregation:AGGRegate:NRCC:DL None')
-            self._socket_send_SCPI_command(
-                'BSE:CONFig:LTE:CELL1:CAGGregation:AGGRegate:NRCC:UL None')
-            self._socket_send_SCPI_command(
-                'BSE:CONFig:LTE:CELL1:CAGGregation:AGGRegate:NRCC:DL CELL1')
-            self._socket_send_SCPI_command(
-                'BSE:CONFig:LTE:CELL1:CAGGregation:AGGRegate:NRCC:DL CELL1')
-            time.sleep(1)
-            self._socket_send_SCPI_command(
-                "BSE:CONFig:LTE:CELL1:CAGGregation:AGGRegate:NRCC:APPly")
-            try:
-                self.wait_until_attached_one_cell(second_cell_type,
-                                                  second_cell_number, dut,
-                                                  timeout, attach_retries,
-                                                  False)
-            except Exception as exc:
-                raise RuntimeError(f'Cannot connect to second cell') from exc
-
-    def set_lte_rrc_state_change_timer(self, enabled, time=10):
-        """Configures the LTE RRC state change timer.
-
-        Args:
-            enabled: a boolean indicating if the timer should be on or off.
-            time: time in seconds for the timer to expire.
-        """
-        raise NotImplementedError(
-            'This UXM callbox simulator does not support this feature.')
-
-    def set_band(self, bts_index, band):
-        """Sets the band for the indicated base station.
-
-        Args:
-            bts_index: the base station number.
-            band: the new band.
-        """
-        raise NotImplementedError(
-            'This UXM callbox simulator does not support this feature.')
-
-    def get_duplex_mode(self, band):
-        """Determines if the band uses FDD or TDD duplex mode
-
-        Args:
-            band: a band number.
-
-        Returns:
-            an variable of class DuplexMode indicating if band is FDD or TDD.
-        """
-        raise NotImplementedError(
-            'This UXM callbox simulator does not support this feature.')
-
-    def set_input_power(self, bts_index, input_power):
-        """Sets the input power for the indicated base station.
-
-        Args:
-            bts_index: the base station number.
-            input_power: the new input power.
-        """
-        raise NotImplementedError(
-            'This UXM callbox simulator does not support this feature.')
-
-    def set_output_power(self, bts_index, output_power):
-        """Sets the output power for the indicated base station.
-
-        Args:
-            bts_index: the base station number.
-            output_power: the new output power.
-        """
-        raise NotImplementedError(
-            'This UXM callbox simulator does not support this feature.')
-
-    def set_tdd_config(self, bts_index, tdd_config):
-        """Sets the tdd configuration number for the indicated base station.
-
-        Args:
-            bts_index: the base station number.
-            tdd_config: the new tdd configuration number.
-        """
-        raise NotImplementedError(
-            'This UXM callbox simulator does not support this feature.')
-
-    def set_ssf_config(self, bts_index, ssf_config):
-        """Sets the Special Sub-Frame config number for the indicated.
-
-        base station.
-
-        Args:
-            bts_index: the base station number.
-            ssf_config: the new ssf config number.
-        """
-        raise NotImplementedError(
-            'This UXM callbox simulator does not support this feature.')
-
-    def set_bandwidth(self, bts_index, bandwidth):
-        """Sets the bandwidth for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            bandwidth: the new bandwidth
-        """
-        raise NotImplementedError(
-            'This UXM callbox simulator does not support this feature.')
-
-    def set_downlink_channel_number(self, bts_index, channel_number):
-        """Sets the downlink channel number for the indicated base station.
-
-        Args:
-            bts_index: the base station number.
-            channel_number: the new channel number.
-        """
-        raise NotImplementedError(
-            'This UXM callbox simulator does not support this feature.')
-
-    def set_mimo_mode(self, bts_index, mimo_mode):
-        """Sets the mimo mode for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            mimo_mode: the new mimo mode
-        """
-        raise NotImplementedError(
-            'This UXM callbox simulator does not support this feature.')
-
-    def set_transmission_mode(self, bts_index, tmode):
-        """Sets the transmission mode for the indicated base station.
-
-        Args:
-            bts_index: the base station number.
-            tmode: the new transmission mode.
-        """
-        raise NotImplementedError(
-            'This UXM callbox simulator does not support this feature.')
-
-    def set_scheduling_mode(self,
-                            bts_index,
-                            scheduling,
-                            mcs_dl=None,
-                            mcs_ul=None,
-                            nrb_dl=None,
-                            nrb_ul=None):
-        """Sets the scheduling mode for the indicated base station.
-
-        Args:
-            bts_index: the base station number.
-            scheduling: the new scheduling mode.
-            mcs_dl: Downlink MCS.
-            mcs_ul: Uplink MCS.
-            nrb_dl: Number of RBs for downlink.
-            nrb_ul: Number of RBs for uplink.
-        """
-        raise NotImplementedError(
-            'This UXM callbox simulator does not support this feature.')
-
-    def set_dl_256_qam_enabled(self, bts_index, enabled):
-        """Determines what MCS table should be used for the downlink.
-
-        This only saves the setting that will be used when configuring MCS.
-
-        Args:
-            bts_index: the base station number.
-            enabled: whether 256 QAM should be used.
-        """
-        raise NotImplementedError(
-            'This UXM callbox simulator does not support this feature.')
-
-    def set_ul_64_qam_enabled(self, bts_index, enabled):
-        """Determines what MCS table should be used for the uplink.
-
-        This only saves the setting that will be used when configuring MCS.
-
-        Args:
-            bts_index: the base station number.
-            enabled: whether 64 QAM should be used.
-        """
-        raise NotImplementedError(
-            'This UXM callbox simulator does not support this feature.')
-
-    def set_mac_padding(self, bts_index, mac_padding):
-        """Enables or disables MAC padding in the indicated base station.
-
-        Args:
-            bts_index: the base station number.
-            mac_padding: the new MAC padding setting.
-        """
-        raise NotImplementedError(
-            'This UXM callbox simulator does not support this feature.')
-
-    def set_cfi(self, bts_index, cfi):
-        """Sets the Channel Format Indicator for the indicated base station.
-
-        Args:
-            bts_index: the base station number.
-            cfi: the new CFI setting.
-        """
-        raise NotImplementedError(
-            'This UXM callbox simulator does not support this feature.')
-
-    def set_paging_cycle(self, bts_index, cycle_duration):
-        """Sets the paging cycle duration for the indicated base station.
-
-        Args:
-            bts_index: the base station number.
-            cycle_duration: the new paging cycle duration in milliseconds.
-        """
-        raise NotImplementedError(
-            'This UXM callbox simulator does not support this feature.')
-
-    def set_phich_resource(self, bts_index, phich):
-        """Sets the PHICH Resource setting for the indicated base station.
-
-        Args:
-            bts_index: the base station number.
-            phich: the new PHICH resource setting.
-        """
-        raise NotImplementedError(
-            'This UXM callbox simulator does not support this feature.')
-
-    def lte_attach_secondary_carriers(self, ue_capability_enquiry):
-        """Activates the secondary carriers for CA.
-
-        Requires the DUT to be attached to the primary carrier first.
-
-        Args:
-            ue_capability_enquiry: UE capability enquiry message to be sent to
-              the UE before starting carrier aggregation.
-        """
-        raise NotImplementedError(
-            'This UXM callbox simulator does not support this feature.')
-
-    def wait_until_communication_state(self, timeout=120):
-        """Waits until the DUT is in Communication state.
-
-        Args:
-            timeout: after this amount of time the method will raise
-                a CellularSimulatorError exception. Default is 120 seconds.
-        """
-        raise NotImplementedError(
-            'This UXM callbox simulator does not support this feature.')
-
-    def wait_until_idle_state(self, timeout=120):
-        """Waits until the DUT is in Idle state.
-
-        Args:
-            timeout: after this amount of time the method will raise a
-                CellularSimulatorError exception. Default is 120 seconds.
-        """
-        raise NotImplementedError(
-            'This UXM callbox simulator does not support this feature.')
-
-    def detach(self):
-        """ Turns off all the base stations so the DUT loose connection."""
-        for cell in self.cells:
-            cell_type = cell[self.KEY_CELL_TYPE]
-            cell_number = cell[self.KEY_CELL_NUMBER]
-            self._socket_send_SCPI_command(
-                self.SCPI_CELL_OFF_CMD.format(cell_type, cell_number))
-
-    def stop(self):
-        """Stops current simulation.
-
-        After calling this method, the simulator will need to be set up again.
-        """
-        raise NotImplementedError(
-            'This UXM callbox simulator does not support this feature.')
-
-    def start_data_traffic(self):
-        """Starts transmitting data from the instrument to the DUT. """
-        raise NotImplementedError(
-            'This UXM callbox simulator does not support this feature.')
-
-    def stop_data_traffic(self):
-        """Stops transmitting data from the instrument to the DUT. """
-        raise NotImplementedError(
-            'This UXM callbox simulator does not support this feature.')
diff --git a/src/antlion/dict_object.py b/src/antlion/dict_object.py
index 0be7821..9e3288f 100644
--- a/src/antlion/dict_object.py
+++ b/src/antlion/dict_object.py
@@ -67,8 +67,7 @@
         if name in super(DictObject, self).keys():
             super(DictObject, self).__setitem__(name, value)
         else:
-            raise AttributeError("Class does not have attribute {}"
-                                 .format(value))
+            raise AttributeError("Class does not have attribute {}".format(value))
 
     @classmethod
     def from_dict(cls, dictionary):
diff --git a/src/antlion/error.py b/src/antlion/error.py
index 95969e5..bf69b7d 100644
--- a/src/antlion/error.py
+++ b/src/antlion/error.py
@@ -6,14 +6,13 @@
 
 class ActsError(TestError):
     """Base Acts Error"""
+
     def __init__(self, *args, **kwargs):
         class_name = self.__class__.__name__
         self.error_doc = self.__class__.__doc__
-        self.error_code = getattr(ActsErrorCode, class_name,
-                                  ActsErrorCode.UNKNOWN)
-        extras = dict(**kwargs, error_doc=self.error_doc,
-                      error_code=self.error_code)
-        details = args[0] if len(args) > 0 else ''
+        self.error_code = getattr(ActsErrorCode, class_name, ActsErrorCode.UNKNOWN)
+        extras = dict(**kwargs, error_doc=self.error_doc, error_code=self.error_code)
+        details = args[0] if len(args) > 0 else ""
         super().__init__(details, extras)
 
 
diff --git a/src/antlion/event/decorators.py b/src/antlion/event/decorators.py
index c3f7b83..b845dad 100644
--- a/src/antlion/event/decorators.py
+++ b/src/antlion/event/decorators.py
@@ -23,22 +23,20 @@
 
     This function must be registered manually.
     """
+
     class InnerSubscriptionHandle(StaticSubscriptionHandle):
         def __init__(self, func):
-            super().__init__(event_type, func,
-                             event_filter=event_filter,
-                             order=order)
+            super().__init__(event_type, func, event_filter=event_filter, order=order)
 
     return InnerSubscriptionHandle
 
 
 def subscribe(event_type, event_filter=None, order=0):
     """A decorator that subscribes an instance method."""
+
     class InnerSubscriptionHandle(InstanceSubscriptionHandle):
         def __init__(self, func):
-            super().__init__(event_type, func,
-                             event_filter=event_filter,
-                             order=order)
+            super().__init__(event_type, func, event_filter=event_filter, order=order)
 
     return InnerSubscriptionHandle
 
@@ -58,8 +56,7 @@
 
 
 def register_instance_subscriptions(obj):
-    """A decorator that subscribes all instance subscriptions after object init.
-    """
+    """A decorator that subscribes all instance subscriptions after object init."""
     old_init = obj.__init__
 
     def init_replacement(self, *args, **kwargs):
diff --git a/src/antlion/event/event_bus.py b/src/antlion/event/event_bus.py
index 9c6a862..5488b80 100644
--- a/src/antlion/event/event_bus.py
+++ b/src/antlion/event/event_bus.py
@@ -58,9 +58,9 @@
         Returns:
             A registration ID.
         """
-        subscription = EventSubscription(event_type, func,
-                                         event_filter=filter_fn,
-                                         order=order)
+        subscription = EventSubscription(
+            event_type, func, event_filter=filter_fn, order=order
+        )
         return self.register_subscription(subscription)
 
     def register_subscriptions(self, subscriptions):
@@ -124,8 +124,9 @@
                 subscription.deliver(event)
             except Exception:
                 if ignore_errors:
-                    logging.exception('An exception occurred while handling '
-                                      'an event.')
+                    logging.exception(
+                        "An exception occurred while handling " "an event."
+                    )
                     continue
                 raise
 
@@ -146,20 +147,24 @@
         elif type(registration_id) is not int:
             raise ValueError(
                 'Subscription ID "%s" is not a valid ID. This value'
-                'must be an integer ID returned from subscribe().'
-                % registration_id)
+                "must be an integer ID returned from subscribe()." % registration_id
+            )
         else:
             # The value is a "valid" id, but is not subscribed. It's possible
             # another thread has unsubscribed this value.
-            logging.warning('Attempted to unsubscribe %s, but the matching '
-                            'subscription cannot be found.' % registration_id)
+            logging.warning(
+                "Attempted to unsubscribe %s, but the matching "
+                "subscription cannot be found." % registration_id
+            )
             return False
 
         event_type = subscription.event_type
         with self._subscription_lock:
             self._registration_id_map.pop(registration_id, None)
-            if (event_type in self._subscriptions and
-                    subscription in self._subscriptions[event_type]):
+            if (
+                event_type in self._subscriptions
+                and subscription in self._subscriptions[event_type]
+            ):
                 self._subscriptions[event_type].remove(subscription)
         return True
 
@@ -201,8 +206,7 @@
     Returns:
         A registration ID.
     """
-    return _event_bus.register(event_type, func, filter_fn=filter_fn,
-                               order=order)
+    return _event_bus.register(event_type, func, filter_fn=filter_fn, order=order)
 
 
 def register_subscriptions(subscriptions):
@@ -284,9 +288,9 @@
         self.registration_id = None
 
     def __enter__(self):
-        self.registration_id = _event_bus.register(self.event_type, self.func,
-                                                   filter_fn=self.filter_fn,
-                                                   order=self.order)
+        self.registration_id = _event_bus.register(
+            self.event_type, self.func, filter_fn=self.filter_fn, order=self.order
+        )
 
     def __exit__(self, *unused):
         _event_bus.unregister(self.registration_id)
diff --git a/src/antlion/event/event_subscription.py b/src/antlion/event/event_subscription.py
index e442507..ee8720c 100644
--- a/src/antlion/event/event_subscription.py
+++ b/src/antlion/event/event_subscription.py
@@ -25,6 +25,7 @@
                        to the subscribed function.
         order: The order value in which this subscription should be called.
     """
+
     def __init__(self, event_type, func, event_filter=None, order=0):
         self._event_type = event_type
         self._func = func
diff --git a/src/antlion/event/subscription_bundle.py b/src/antlion/event/subscription_bundle.py
index d936bd2..ac3cfb6 100644
--- a/src/antlion/event/subscription_bundle.py
+++ b/src/antlion/event/subscription_bundle.py
@@ -25,8 +25,7 @@
         """True if this SubscriptionBundle has been registered."""
         return self._registered
 
-    def add(self, event_type, func, event_filter=None,
-            order=0):
+    def add(self, event_type, func, event_filter=None, order=0):
         """Adds a new Subscription to this SubscriptionBundle.
 
         If this SubscriptionBundle is registered, the added Subscription will
@@ -35,9 +34,9 @@
         Returns:
             the EventSubscription object created.
         """
-        subscription = EventSubscription(event_type, func,
-                                         event_filter=event_filter,
-                                         order=order)
+        subscription = EventSubscription(
+            event_type, func, event_filter=event_filter, order=order
+        )
         return self.add_subscription(subscription)
 
     def add_subscription(self, subscription):
@@ -79,11 +78,14 @@
             self._registered = True
             for subscription, registration_id in self.subscriptions.items():
                 if registration_id is not None:
-                    logging.warning('Registered subscription found in '
-                                    'unregistered SubscriptionBundle: %s, %s' %
-                                    (subscription, registration_id))
-                self.subscriptions[subscription] = (
-                    event_bus.register_subscription(subscription))
+                    logging.warning(
+                        "Registered subscription found in "
+                        "unregistered SubscriptionBundle: %s, %s"
+                        % (subscription, registration_id)
+                    )
+                self.subscriptions[subscription] = event_bus.register_subscription(
+                    subscription
+                )
 
     def unregister(self):
         """Unregisters all subscriptions managed by this SubscriptionBundle."""
@@ -93,9 +95,11 @@
             self._registered = False
             for subscription, registration_id in self.subscriptions.items():
                 if registration_id is None:
-                    logging.warning('Unregistered subscription found in '
-                                    'registered SubscriptionBundle: %s, %s' %
-                                    (subscription, registration_id))
+                    logging.warning(
+                        "Unregistered subscription found in "
+                        "registered SubscriptionBundle: %s, %s"
+                        % (subscription, registration_id)
+                    )
                 event_bus.unregister(subscription)
                 self.subscriptions[subscription] = None
 
@@ -122,8 +126,7 @@
     Returns:
         An unregistered SubscriptionBundle.
     """
-    return _create_from_object(instance, instance.__class__,
-                               InstanceSubscriptionHandle)
+    return _create_from_object(instance, instance.__class__, InstanceSubscriptionHandle)
 
 
 def _create_from_object(obj, obj_to_search, subscription_handle_type):
diff --git a/src/antlion/event/subscription_handle.py b/src/antlion/event/subscription_handle.py
index 0123ab7..6aa9c3c 100644
--- a/src/antlion/event/subscription_handle.py
+++ b/src/antlion/event/subscription_handle.py
@@ -31,9 +31,12 @@
     def subscription(self):
         if self._subscription:
             return self._subscription
-        self._subscription = EventSubscription(self._event_type, self._func,
-                                               event_filter=self._event_filter,
-                                               order=self._order)
+        self._subscription = EventSubscription(
+            self._event_type,
+            self._func,
+            event_filter=self._event_filter,
+            order=self._order,
+        )
         return self._subscription
 
     def __get__(self, instance, owner):
@@ -44,8 +47,9 @@
 
         # Otherwise, we create a new SubscriptionHandle that will only be used
         # for the instance that owns this SubscriptionHandle.
-        ret = SubscriptionHandle(self._event_type, self._func,
-                                 self._event_filter, self._order)
+        ret = SubscriptionHandle(
+            self._event_type, self._func, self._event_filter, self._order
+        )
         ret._owner = instance
         ret._func = ret._wrap_call(ret._func)
         for attr, value in owner.__dict__.items():
@@ -60,6 +64,7 @@
                 return func(*args, **kwargs)
             else:
                 return func(self._owner, *args, **kwargs)
+
         return _wrapped_call
 
     def __call__(self, *args, **kwargs):
diff --git a/src/antlion/keys.py b/src/antlion/keys.py
index a89898c..b545d44 100644
--- a/src/antlion/keys.py
+++ b/src/antlion/keys.py
@@ -22,73 +22,55 @@
 
 
 class Config(enum.Enum):
-    """Enum values for test config related lookups.
-    """
+    """Enum values for test config related lookups."""
+
     # Keys used to look up values from test config files.
     # These keys define the wording of test configs and their internal
     # references.
-    key_log_path = 'logpath'
-    key_testbeds_under_test = 'testbeds_under_test'
-    key_testbed = 'testbed'
-    key_testbed_name = 'name'
+    key_log_path = "logpath"
+    key_testbeds_under_test = "testbeds_under_test"
+    key_testbed = "testbed"
+    key_testbed_name = "name"
     # configpath is the directory. key_config_full_path is the file path.
-    key_config_path = 'configpath'
-    key_config_full_path = 'config_full_path'
-    key_test_paths = 'testpaths'
-    key_port = 'Port'
-    key_address = 'Address'
-    key_test_case_iterations = 'test_case_iterations'
-    key_test_failure_tracebacks = 'test_failure_tracebacks'
+    key_config_path = "configpath"
+    key_config_full_path = "config_full_path"
+    key_test_paths = "testpaths"
+    key_port = "Port"
+    key_address = "Address"
+    key_test_case_iterations = "test_case_iterations"
+    key_test_failure_tracebacks = "test_failure_tracebacks"
     # Config names for controllers packaged in ACTS.
-    key_android_device = 'AndroidDevice'
-    key_bits = 'Bits'
-    key_bluetooth_pts_device = 'BluetoothPtsDevice'
-    key_fuchsia_device = 'FuchsiaDevice'
-    key_buds_device = 'BudsDevice'
-    key_chameleon_device = 'ChameleonDevice'
-    key_native_android_device = 'NativeAndroidDevice'
-    key_relay_device = 'RelayDevice'
-    key_access_point = 'AccessPoint'
-    key_attenuator = 'Attenuator'
-    key_iperf_server = 'IPerfServer'
-    key_iperf_client = 'IPerfClient'
-    key_packet_sender = 'PacketSender'
-    key_monsoon = 'Monsoon'
-    key_sniffer = 'Sniffer'
-    key_arduino_wifi_dongle = 'ArduinoWifiDongle'
-    key_packet_capture = 'PacketCapture'
-    key_pdu = 'PduDevice'
-    key_openwrt_ap = 'OpenWrtAP'
-    key_tigertail = 'Tigertail'
-    key_asus_axe11000_ap = 'AsusAXE11000AP'
+    key_access_point = "AccessPoint"
+    key_android_device = "AndroidDevice"
+    key_attenuator = "Attenuator"
+    key_bluetooth_pts_device = "BluetoothPtsDevice"
+    key_fuchsia_device = "FuchsiaDevice"
+    key_iperf_client = "IPerfClient"
+    key_iperf_server = "IPerfServer"
+    key_openwrt_ap = "OpenWrtAP"
+    key_packet_capture = "PacketCapture"
+    key_packet_sender = "PacketSender"
+    key_pdu = "PduDevice"
+    key_sniffer = "Sniffer"
     # Internal keys, used internally, not exposed to user's config files.
-    ikey_user_param = 'user_params'
-    ikey_testbed_name = 'testbed_name'
-    ikey_logger = 'log'
-    ikey_logpath = 'log_path'
-    ikey_summary_writer = 'summary_writer'
+    ikey_user_param = "user_params"
+    ikey_testbed_name = "testbed_name"
+    ikey_logger = "log"
+    ikey_logpath = "log_path"
+    ikey_summary_writer = "summary_writer"
     # module name of controllers packaged in ACTS.
-    m_key_bits = 'bits'
-    m_key_monsoon = 'monsoon'
-    m_key_android_device = 'android_device'
-    m_key_fuchsia_device = 'fuchsia_device'
-    m_key_bluetooth_pts_device = 'bluetooth_pts_device'
-    m_key_buds_device = 'buds_controller'
-    m_key_chameleon_device = 'chameleon_controller'
-    m_key_native_android_device = 'native_android_device'
-    m_key_relay_device = 'relay_device_controller'
-    m_key_access_point = 'access_point'
-    m_key_attenuator = 'attenuator'
-    m_key_iperf_server = 'iperf_server'
-    m_key_iperf_client = 'iperf_client'
-    m_key_packet_sender = 'packet_sender'
-    m_key_sniffer = 'sniffer'
-    m_key_arduino_wifi_dongle = 'arduino_wifi_dongle'
-    m_key_packet_capture = 'packet_capture'
-    m_key_pdu = 'pdu'
-    m_key_openwrt_ap = 'openwrt_ap'
-    m_key_tigertail = 'tigertail'
-    m_key_asus_axe11000_ap = 'asus_axe11000_ap'
+    m_key_access_point = "access_point"
+    m_key_android_device = "android_device"
+    m_key_attenuator = "attenuator"
+    m_key_bluetooth_pts_device = "bluetooth_pts_device"
+    m_key_fuchsia_device = "fuchsia_device"
+    m_key_iperf_client = "iperf_client"
+    m_key_iperf_server = "iperf_server"
+    m_key_openwrt_ap = "openwrt_ap"
+    m_key_packet_capture = "packet_capture"
+    m_key_packet_sender = "packet_sender"
+    m_key_pdu = "pdu"
+    m_key_sniffer = "sniffer"
 
     # A list of keys whose values in configs should not be passed to test
     # classes without unpacking first.
@@ -96,32 +78,20 @@
 
     # Controller names packaged with ACTS.
     builtin_controller_names = [
+        key_access_point,
         key_android_device,
-        key_bits,
+        key_attenuator,
         key_bluetooth_pts_device,
         key_fuchsia_device,
-        key_buds_device,
-        key_native_android_device,
-        key_relay_device,
-        key_access_point,
-        key_attenuator,
-        key_iperf_server,
         key_iperf_client,
-        key_packet_sender,
-        key_monsoon,
-        key_sniffer,
-        key_chameleon_device,
-        key_arduino_wifi_dongle,
-        key_packet_capture,
-        key_pdu,
+        key_iperf_server,
         key_openwrt_ap,
-        key_tigertail,
-        key_asus_axe11000_ap,
+        key_packet_capture,
+        key_packet_sender,
+        key_pdu,
+        key_sniffer,
     ]
 
-    # Keys that are file or folder paths.
-    file_path_keys = [key_relay_device]
-
 
 def get_name_by_value(value):
     for name, member in Config.__members__.items():
@@ -131,9 +101,8 @@
 
 
 def get_module_name(name_in_config):
-    """Translates the name of a controller in config file to its module name.
-    """
-    return value_to_value(name_in_config, 'm_%s')
+    """Translates the name of a controller in config file to its module name."""
+    return value_to_value(name_in_config, "m_%s")
 
 
 def value_to_value(ref_value, pattern):
diff --git a/src/antlion/libs/logging/log_stream.py b/src/antlion/libs/logging/log_stream.py
index b457e46..27aa077 100644
--- a/src/antlion/libs/logging/log_stream.py
+++ b/src/antlion/libs/logging/log_stream.py
@@ -87,7 +87,6 @@
     }
 # yapf: enable
 
-
 _log_streams = dict()
 _null_handler = logging.NullHandler()
 
@@ -101,9 +100,15 @@
 event_bus.register_subscription(_update_handlers.subscription)
 
 
-def create_logger(name, log_name=None, base_path='', subcontext='',
-                  log_styles=LogStyles.NONE, stream_format=None,
-                  file_format=None):
+def create_logger(
+    name,
+    log_name=None,
+    base_path="",
+    subcontext="",
+    log_styles=LogStyles.NONE,
+    stream_format=None,
+    file_format=None,
+):
     """Creates a Python Logger object with the given attributes.
 
     Creation through this method will automatically manage the logger in the
@@ -130,8 +135,9 @@
     """
     if name in _log_streams:
         _log_streams[name].cleanup()
-    log_stream = _LogStream(name, log_name, base_path, subcontext, log_styles,
-                            stream_format, file_format)
+    log_stream = _LogStream(
+        name, log_name, base_path, subcontext, log_styles, stream_format, file_format
+    )
     _set_logger(log_stream)
     return log_stream.logger
 
@@ -160,6 +166,7 @@
     """FileHandler implementation that allows the output file to be changed
     during operation.
     """
+
     def set_file(self, file_name):
         """Set the target output file to file_name.
 
@@ -179,6 +186,7 @@
     changed during operation. Rotated files will automatically adopt the newest
     output path.
     """
+
     set_file = MovableFileHandler.set_file
 
 
@@ -203,9 +211,16 @@
         file_format: Format used for log output to files
     """
 
-    def __init__(self, name, log_name=None, base_path='', subcontext='',
-                 log_styles=LogStyles.NONE, stream_format=None,
-                 file_format=None):
+    def __init__(
+        self,
+        name,
+        log_name=None,
+        base_path="",
+        subcontext="",
+        log_styles=LogStyles.NONE,
+        stream_format=None,
+        file_format=None,
+    ):
         """Creates a LogStream.
 
         Args:
@@ -235,8 +250,7 @@
         # Add a NullHandler to suppress unwanted console output
         self.logger.addHandler(_null_handler)
         self.logger.propagate = False
-        self.base_path = base_path or getattr(logging, 'log_path',
-                                              '/tmp/acts_logs')
+        self.base_path = base_path or getattr(logging, "log_path", "/tmp/acts_logs")
         self.subcontext = subcontext
         context.TestContext.add_base_output_path(self.logger.name, self.base_path)
         context.TestContext.add_subcontext(self.logger.name, self.subcontext)
@@ -273,8 +287,9 @@
         """
 
         def invalid_style_error(message):
-            raise InvalidStyleSetError('{LogStyle Set: %s} %s' %
-                                       (_log_styles_list, message))
+            raise InvalidStyleSetError(
+                "{LogStyle Set: %s} %s" % (_log_styles_list, message)
+            )
 
         # Store the log locations that have already been set per level.
         levels_dict = {}
@@ -288,39 +303,48 @@
                         if log_style & log_location:
                             if log_location & levels_dict[level]:
                                 invalid_style_error(
-                                    'The log location %s for log level %s has '
-                                    'been set multiple times' %
-                                    (log_location, level))
+                                    "The log location %s for log level %s has "
+                                    "been set multiple times" % (log_location, level)
+                                )
                             else:
                                 levels_dict[level] |= log_location
                     # Check that for a given log-level, not more than one
                     # of MONOLITH_LOG, TESTCLASS_LOG, TESTCASE_LOG is set.
                     locations = levels_dict[level] & LogStyles.ALL_FILE_LOGS
                     valid_locations = [
-                        LogStyles.TESTCASE_LOG, LogStyles.TESTCLASS_LOG,
-                        LogStyles.MONOLITH_LOG, LogStyles.NONE]
+                        LogStyles.TESTCASE_LOG,
+                        LogStyles.TESTCLASS_LOG,
+                        LogStyles.MONOLITH_LOG,
+                        LogStyles.NONE,
+                    ]
                     if locations not in valid_locations:
                         invalid_style_error(
-                            'More than one of MONOLITH_LOG, TESTCLASS_LOG, '
-                            'TESTCASE_LOG is set for log level %s.' % level)
+                            "More than one of MONOLITH_LOG, TESTCLASS_LOG, "
+                            "TESTCASE_LOG is set for log level %s." % level
+                        )
             if log_style & LogStyles.ALL_LEVELS == 0:
-                invalid_style_error('LogStyle %s needs to set a log '
-                                    'level.' % log_style)
+                invalid_style_error(
+                    "LogStyle %s needs to set a log " "level." % log_style
+                )
             if log_style & ~LogStyles.ALL_LEVELS == 0:
-                invalid_style_error('LogStyle %s needs to set a log '
-                                    'location.' % log_style)
+                invalid_style_error(
+                    "LogStyle %s needs to set a log " "location." % log_style
+                )
             if log_style & LogStyles.ROTATE_LOGS and not log_style & (
-                    LogStyles.MONOLITH_LOG | LogStyles.TESTCLASS_LOG |
-                    LogStyles.TESTCASE_LOG):
-                invalid_style_error('LogStyle %s has ROTATE_LOGS set, but does '
-                                    'not specify a log type.' % log_style)
+                LogStyles.MONOLITH_LOG
+                | LogStyles.TESTCLASS_LOG
+                | LogStyles.TESTCASE_LOG
+            ):
+                invalid_style_error(
+                    "LogStyle %s has ROTATE_LOGS set, but does "
+                    "not specify a log type." % log_style
+                )
 
     @staticmethod
     def __create_rotating_file_handler(filename):
         """Generates a callable to create an appropriate RotatingFileHandler."""
         # Magic number explanation: 10485760 == 10MB
-        return MovableRotatingFileHandler(filename, maxBytes=10485760,
-                                          backupCount=5)
+        return MovableRotatingFileHandler(filename, maxBytes=10485760, backupCount=5)
 
     @staticmethod
     def __get_file_handler_creator(log_style):
@@ -363,8 +387,9 @@
         Returns: A FileHandler
         """
         directory = self.__get_current_output_dir(
-            LogStyles.LOCATION_TO_CONTEXT_LEVEL[location])
-        base_name = '%s_%s.txt' % (self.name, LogStyles.LEVEL_NAMES[level])
+            LogStyles.LOCATION_TO_CONTEXT_LEVEL[location]
+        )
+        base_name = "%s_%s.txt" % (self.name, LogStyles.LEVEL_NAMES[level])
         handler = creator(os.path.join(directory, base_name))
         handler.setLevel(LogStyles.LEVEL_TO_NO[level])
         if self.file_format:
@@ -395,8 +420,7 @@
             if not (log_style & log_level and log_location):
                 continue
 
-            handler = self.__create_handler(
-                handler_creator, log_level, log_location)
+            handler = self.__create_handler(handler_creator, log_level, log_location)
             self.logger.addHandler(handler)
 
             if log_style & LogStyles.TESTCLASS_LOG:
diff --git a/src/antlion/libs/ota/ota_runners/ota_runner.py b/src/antlion/libs/ota/ota_runners/ota_runner.py
index 4b20564..05dba4c 100644
--- a/src/antlion/libs/ota/ota_runners/ota_runner.py
+++ b/src/antlion/libs/ota/ota_runners/ota_runner.py
@@ -19,10 +19,8 @@
 
 """The setup time in seconds."""
 SL4A_SERVICE_SETUP_TIME = 5
-
-
 """The path to the metadata found within the OTA package."""
-OTA_PACKAGE_METADATA_PATH = 'META-INF/com/android/metadata'
+OTA_PACKAGE_METADATA_PATH = "META-INF/com/android/metadata"
 
 
 class OtaError(Exception):
@@ -44,40 +42,43 @@
     def _update(self):
         post_build_id = self.get_post_build_id()
         log = self.android_device.log
-        old_info = self.android_device.adb.getprop('ro.build.fingerprint')
-        log.info('Starting Update. Beginning build info: %s', old_info)
-        log.info('Stopping services.')
+        old_info = self.android_device.adb.getprop("ro.build.fingerprint")
+        log.info("Starting Update. Beginning build info: %s", old_info)
+        log.info("Stopping services.")
         self.android_device.stop_services()
-        log.info('Beginning tool.')
+        log.info("Beginning tool.")
         self.ota_tool.update(self)
-        log.info('Tool finished. Waiting for boot completion.')
+        log.info("Tool finished. Waiting for boot completion.")
         self.android_device.wait_for_boot_completion()
-        new_info = self.android_device.adb.getprop('ro.build.fingerprint')
+        new_info = self.android_device.adb.getprop("ro.build.fingerprint")
         if not old_info or old_info == new_info:
-            raise OtaError('The device was not updated to a new build. '
-                           'Previous build: %s. Current build: %s. '
-                           'Expected build: %s' % (old_info, new_info,
-                                                   post_build_id))
-        log.info('Boot completed. Rooting adb.')
+            raise OtaError(
+                "The device was not updated to a new build. "
+                "Previous build: %s. Current build: %s. "
+                "Expected build: %s" % (old_info, new_info, post_build_id)
+            )
+        log.info("Boot completed. Rooting adb.")
         self.android_device.root_adb()
-        log.info('Root complete.')
+        log.info("Root complete.")
         if self.android_device.skip_sl4a:
-            self.android_device.log.info('Skipping SL4A install.')
+            self.android_device.log.info("Skipping SL4A install.")
         else:
             for _ in range(3):
-                self.android_device.log.info('Re-installing SL4A from "%s".',
-                                             self.get_sl4a_apk())
+                self.android_device.log.info(
+                    'Re-installing SL4A from "%s".', self.get_sl4a_apk()
+                )
                 self.android_device.adb.install(
-                    '-r -g %s' % self.get_sl4a_apk(), ignore_status=True)
+                    "-r -g %s" % self.get_sl4a_apk(), ignore_status=True
+                )
                 time.sleep(SL4A_SERVICE_SETUP_TIME)
                 if self.android_device.is_sl4a_installed():
                     break
-        log.info('Starting services.')
+        log.info("Starting services.")
         self.android_device.start_services()
         self.android_device.update_sdk_api_level()
-        log.info('Services started. Running ota tool cleanup.')
+        log.info("Services started. Running ota tool cleanup.")
         self.ota_tool.cleanup(self)
-        log.info('Cleanup complete.')
+        log.info("Cleanup complete.")
 
     def get_ota_package_metadata(self, requested_field):
         """Returns a variable found within the OTA package's metadata.
@@ -87,15 +88,15 @@
 
         Will return None if the variable cannot be found.
         """
-        ota_zip = ZipFile(self.get_ota_package(), 'r')
+        ota_zip = ZipFile(self.get_ota_package(), "r")
         if OTA_PACKAGE_METADATA_PATH in ota_zip.namelist():
             with ota_zip.open(OTA_PACKAGE_METADATA_PATH) as metadata:
-                timestamp_line = requested_field.encode('utf-8')
+                timestamp_line = requested_field.encode("utf-8")
                 timestamp_offset = len(timestamp_line) + 1
 
                 for line in metadata.readlines():
                     if line.startswith(timestamp_line):
-                        return line[timestamp_offset:].decode('utf-8').strip()
+                        return line[timestamp_offset:].decode("utf-8").strip()
         return None
 
     def validate_update(self):
@@ -106,28 +107,36 @@
                 validated.
         """
         # The timestamp the current device build was created at.
-        cur_img_timestamp = self.android_device.adb.getprop('ro.build.date.utc')
-        ota_img_timestamp = self.get_ota_package_metadata('post-timestamp')
+        cur_img_timestamp = self.android_device.adb.getprop("ro.build.date.utc")
+        ota_img_timestamp = self.get_ota_package_metadata("post-timestamp")
 
         if ota_img_timestamp is None:
-            raise InvalidOtaUpdateError('Unable to find the timestamp '
-                                        'for the OTA build.')
+            raise InvalidOtaUpdateError(
+                "Unable to find the timestamp " "for the OTA build."
+            )
 
         try:
             if int(ota_img_timestamp) <= int(cur_img_timestamp):
                 cur_fingerprint = self.android_device.adb.getprop(
-                    'ro.bootimage.build.fingerprint')
+                    "ro.bootimage.build.fingerprint"
+                )
                 ota_fingerprint = self.get_post_build_id()
                 raise InvalidOtaUpdateError(
-                    'The OTA image comes from an earlier build than the '
-                    'source build. Current build: Time: %s -- %s, '
-                    'OTA build: Time: %s -- %s' %
-                    (cur_img_timestamp, cur_fingerprint,
-                     ota_img_timestamp, ota_fingerprint))
+                    "The OTA image comes from an earlier build than the "
+                    "source build. Current build: Time: %s -- %s, "
+                    "OTA build: Time: %s -- %s"
+                    % (
+                        cur_img_timestamp,
+                        cur_fingerprint,
+                        ota_img_timestamp,
+                        ota_fingerprint,
+                    )
+                )
         except ValueError:
             raise InvalidOtaUpdateError(
-                'Unable to parse timestamps. Current timestamp: %s, OTA '
-                'timestamp: %s' % (ota_img_timestamp, cur_img_timestamp))
+                "Unable to parse timestamps. Current timestamp: %s, OTA "
+                "timestamp: %s" % (ota_img_timestamp, cur_img_timestamp)
+            )
 
     def get_post_build_id(self):
         """Returns the post-build ID found within the OTA package metadata.
@@ -135,7 +144,7 @@
         Raises:
             InvalidOtaUpdateError if the post-build ID cannot be found.
         """
-        return self.get_ota_package_metadata('post-build')
+        return self.get_ota_package_metadata("post-build")
 
     def can_update(self):
         """Whether or not an update package is available for the device."""
@@ -168,8 +177,9 @@
     def update(self):
         """Starts the update process."""
         if not self.can_update():
-            raise OtaError('A SingleUseOtaTool instance cannot update a device '
-                           'multiple times.')
+            raise OtaError(
+                "A SingleUseOtaTool instance cannot update a device " "multiple times."
+            )
         self._called = True
         self._update()
 
@@ -201,8 +211,10 @@
     def update(self):
         """Starts the update process."""
         if not self.can_update():
-            raise OtaError('This MultiUseOtaRunner has already updated all '
-                           'given packages onto the phone.')
+            raise OtaError(
+                "This MultiUseOtaRunner has already updated all "
+                "given packages onto the phone."
+            )
         self._update()
         self.current_update_number += 1
 
diff --git a/src/antlion/libs/ota/ota_runners/ota_runner_factory.py b/src/antlion/libs/ota/ota_runners/ota_runner_factory.py
index 311b045..a5622da 100644
--- a/src/antlion/libs/ota/ota_runners/ota_runner_factory.py
+++ b/src/antlion/libs/ota/ota_runners/ota_runner_factory.py
@@ -24,7 +24,7 @@
 _bound_devices = {}
 
 DEFAULT_OTA_TOOL = adb_sideload_ota_tool.AdbSideloadOtaTool.__name__
-DEFAULT_OTA_COMMAND = 'adb'
+DEFAULT_OTA_COMMAND = "adb"
 
 
 def create_all_from_configs(config, android_devices):
@@ -63,17 +63,19 @@
     # Default to adb sideload
     try:
         ota_tool_class_name = get_ota_value_from_config(
-            config, 'ota_tool', android_device)
+            config, "ota_tool", android_device
+        )
     except ActsConfigError:
         ota_tool_class_name = DEFAULT_OTA_TOOL
 
     if ota_tool_class_name not in config:
         if ota_tool_class_name is not DEFAULT_OTA_TOOL:
             raise ActsConfigError(
-                'If the ota_tool is overloaded, the path to the tool must be '
+                "If the ota_tool is overloaded, the path to the tool must be "
                 'added to the ACTS config file under {"OtaToolName": '
-                '"path/to/tool"} (in this case, {"%s": "path/to/tool"}.' %
-                ota_tool_class_name)
+                '"path/to/tool"} (in this case, {"%s": "path/to/tool"}.'
+                % ota_tool_class_name
+            )
         else:
             command = DEFAULT_OTA_COMMAND
     else:
@@ -85,26 +87,28 @@
             else:
                 raise ActsConfigError(
                     'Config value for "%s" must be either a string or a list '
-                    'of exactly one element' % ota_tool_class_name)
+                    "of exactly one element" % ota_tool_class_name
+                )
 
-    ota_package = get_ota_value_from_config(config, 'ota_package',
-                                            android_device)
-    ota_sl4a = get_ota_value_from_config(config, 'ota_sl4a', android_device)
+    ota_package = get_ota_value_from_config(config, "ota_package", android_device)
+    ota_sl4a = get_ota_value_from_config(config, "ota_sl4a", android_device)
     if type(ota_sl4a) != type(ota_package):
         raise ActsConfigError(
-            'The ota_package and ota_sl4a must either both be strings, or '
-            'both be lists. Device with serial "%s" has mismatched types.' %
-            android_device.serial)
-    return create(ota_package, ota_sl4a, android_device, ota_tool_class_name,
-                  command)
+            "The ota_package and ota_sl4a must either both be strings, or "
+            'both be lists. Device with serial "%s" has mismatched types.'
+            % android_device.serial
+        )
+    return create(ota_package, ota_sl4a, android_device, ota_tool_class_name, command)
 
 
-def create(ota_package,
-           ota_sl4a,
-           android_device,
-           ota_tool_class_name=DEFAULT_OTA_TOOL,
-           command=DEFAULT_OTA_COMMAND,
-           use_cached_runners=True):
+def create(
+    ota_package,
+    ota_sl4a,
+    android_device,
+    ota_tool_class_name=DEFAULT_OTA_TOOL,
+    command=DEFAULT_OTA_COMMAND,
+    use_cached_runners=True,
+):
     """
     Args:
         ota_package: A string or list of strings corresponding to the
@@ -121,15 +125,14 @@
         An OtaRunner with the given properties from the arguments.
     """
     ota_tool = ota_tool_factory.create(ota_tool_class_name, command)
-    return create_from_package(ota_package, ota_sl4a, android_device, ota_tool,
-                               use_cached_runners)
+    return create_from_package(
+        ota_package, ota_sl4a, android_device, ota_tool, use_cached_runners
+    )
 
 
-def create_from_package(ota_package,
-                        ota_sl4a,
-                        android_device,
-                        ota_tool,
-                        use_cached_runners=True):
+def create_from_package(
+    ota_package, ota_sl4a, android_device, ota_tool, use_cached_runners=True
+):
     """
     Args:
         ota_package: A string or list of strings corresponding to the
@@ -145,25 +148,32 @@
         An OtaRunner with the given properties from the arguments.
     """
     if android_device in _bound_devices and use_cached_runners:
-        logging.warning('Android device %s has already been assigned an '
-                        'OtaRunner. Returning previously created runner.')
+        logging.warning(
+            "Android device %s has already been assigned an "
+            "OtaRunner. Returning previously created runner.",
+            android_device.serial)
         return _bound_devices[android_device]
 
     if type(ota_package) != type(ota_sl4a):
         raise TypeError(
-            'The ota_package and ota_sl4a must either both be strings, or '
+            "The ota_package and ota_sl4a must either both be strings, or "
             'both be lists. Device with serial "%s" has requested mismatched '
-            'types.' % android_device.serial)
+            "types." % android_device.serial
+        )
 
     if type(ota_package) is str:
-        runner = ota_runner.SingleUseOtaRunner(ota_tool, android_device,
-                                               ota_package, ota_sl4a)
+        runner = ota_runner.SingleUseOtaRunner(
+            ota_tool, android_device, ota_package, ota_sl4a
+        )
     elif type(ota_package) is list:
-        runner = ota_runner.MultiUseOtaRunner(ota_tool, android_device,
-                                              ota_package, ota_sl4a)
+        runner = ota_runner.MultiUseOtaRunner(
+            ota_tool, android_device, ota_package, ota_sl4a
+        )
     else:
-        raise TypeError('The "ota_package" value in the acts config must be '
-                        'either a list or a string.')
+        raise TypeError(
+            'The "ota_package" value in the acts config must be '
+            "either a list or a string."
+        )
 
     _bound_devices[android_device] = runner
     return runner
@@ -180,25 +190,26 @@
     Returns: The value at the specified key.
     Throws: ActsConfigError if the value cannot be determined from the config.
     """
-    suffix = ''
-    if 'ota_map' in config:
-        if android_device.serial in config['ota_map']:
-            suffix = '_%s' % config['ota_map'][android_device.serial]
+    suffix = ""
+    if "ota_map" in config:
+        if android_device.serial in config["ota_map"]:
+            suffix = "_%s" % config["ota_map"][android_device.serial]
 
-    ota_package_key = '%s%s' % (key, suffix)
+    ota_package_key = "%s%s" % (key, suffix)
     if ota_package_key not in config:
-        if suffix != '':
+        if suffix != "":
             raise ActsConfigError(
-                'Asked for an OTA Update without specifying a required value. '
+                "Asked for an OTA Update without specifying a required value. "
                 '"ota_map" has entry {"%s": "%s"}, but there is no '
                 'corresponding entry {"%s":"/path/to/file"} found within the '
-                'ACTS config.' % (android_device.serial, suffix[1:],
-                                  ota_package_key))
+                "ACTS config." % (android_device.serial, suffix[1:], ota_package_key)
+            )
         else:
             raise ActsConfigError(
-                'Asked for an OTA Update without specifying a required value. '
+                "Asked for an OTA Update without specifying a required value. "
                 '"ota_map" does not exist or have a key for serial "%s", and '
                 'the default value entry "%s" cannot be found within the ACTS '
-                'config.' % (android_device.serial, ota_package_key))
+                "config." % (android_device.serial, ota_package_key)
+            )
 
     return config[ota_package_key]
diff --git a/src/antlion/libs/ota/ota_tools/adb_sideload_ota_tool.py b/src/antlion/libs/ota/ota_tools/adb_sideload_ota_tool.py
index 5b45241..f097f45 100644
--- a/src/antlion/libs/ota/ota_tools/adb_sideload_ota_tool.py
+++ b/src/antlion/libs/ota/ota_tools/adb_sideload_ota_tool.py
@@ -32,17 +32,16 @@
         super(AdbSideloadOtaTool, self).__init__(ignored_command)
 
     def update(self, ota_runner):
-        logging.info('Rooting adb')
+        logging.info("Rooting adb")
         ota_runner.android_device.root_adb()
-        logging.info('Rebooting to sideload')
-        ota_runner.android_device.adb.reboot('sideload')
+        logging.info("Rebooting to sideload")
+        ota_runner.android_device.adb.reboot("sideload")
         ota_runner.android_device.adb.wait_for_sideload()
-        logging.info('Sideloading ota package')
+        logging.info("Sideloading ota package")
         package_path = ota_runner.get_ota_package()
         logging.info('Running adb sideload with package "%s"' % package_path)
-        ota_runner.android_device.adb.sideload(
-            package_path, timeout=PUSH_TIMEOUT)
-        logging.info('Sideload complete. Waiting for device to come back up.')
+        ota_runner.android_device.adb.sideload(package_path, timeout=PUSH_TIMEOUT)
+        logging.info("Sideload complete. Waiting for device to come back up.")
         ota_runner.android_device.adb.wait_for_recovery()
         ota_runner.android_device.reboot(stop_at_lock_screen=True)
-        logging.info('Device is up. Update complete.')
+        logging.info("Device is up. Update complete.")
diff --git a/src/antlion/libs/ota/ota_tools/ota_tool_factory.py b/src/antlion/libs/ota/ota_tools/ota_tool_factory.py
index c889ddc..0eff707 100644
--- a/src/antlion/libs/ota/ota_tools/ota_tool_factory.py
+++ b/src/antlion/libs/ota/ota_tools/ota_tool_factory.py
@@ -41,10 +41,12 @@
         return _constructed_tools[ota_tool_class]
 
     if ota_tool_class not in _CONSTRUCTORS:
-        raise KeyError('Given Ota Tool class name does not match a known '
-                       'name. Found "%s". Expected any of %s. If this tool '
-                       'does exist, add it to the _CONSTRUCTORS dict in this '
-                       'module.' % (ota_tool_class, _CONSTRUCTORS.keys()))
+        raise KeyError(
+            "Given Ota Tool class name does not match a known "
+            'name. Found "%s". Expected any of %s. If this tool '
+            "does exist, add it to the _CONSTRUCTORS dict in this "
+            "module." % (ota_tool_class, _CONSTRUCTORS.keys())
+        )
 
     new_update_tool = _CONSTRUCTORS[ota_tool_class](command)
     _constructed_tools[ota_tool_class] = new_update_tool
diff --git a/src/antlion/libs/ota/ota_tools/update_device_ota_tool.py b/src/antlion/libs/ota/ota_tools/update_device_ota_tool.py
index 03d10c5..4bdde99 100644
--- a/src/antlion/libs/ota/ota_tools/update_device_ota_tool.py
+++ b/src/antlion/libs/ota/ota_tools/update_device_ota_tool.py
@@ -26,31 +26,35 @@
 # OTA Packages can be upwards of 1 GB. This may take some time to transfer over
 # USB 2.0. A/B devices must also complete the update in the background.
 UPDATE_TIMEOUT = 60 * 60
-UPDATE_LOCATION = '/data/ota_package/update.zip'
+UPDATE_LOCATION = "/data/ota_package/update.zip"
 
 
 class UpdateDeviceOtaTool(ota_tool.OtaTool):
     """Runs an OTA Update with system/update_engine/scripts/update_device.py."""
+
     def __init__(self, command):
         super(UpdateDeviceOtaTool, self).__init__(command)
 
         self.unzip_path = tempfile.mkdtemp()
         utils.unzip_maintain_permissions(self.command, self.unzip_path)
 
-        self.command = os.path.join(self.unzip_path, 'update_device.py')
+        self.command = os.path.join(self.unzip_path, "update_device.py")
 
     def update(self, ota_runner):
-        logging.info('Forcing adb to be in root mode.')
+        logging.info("Forcing adb to be in root mode.")
         ota_runner.android_device.root_adb()
-        update_command = 'python3 %s -s %s %s' % (
-            self.command, ota_runner.serial, ota_runner.get_ota_package())
-        logging.info('Running %s' % update_command)
+        update_command = "python3 %s -s %s %s" % (
+            self.command,
+            ota_runner.serial,
+            ota_runner.get_ota_package(),
+        )
+        logging.info("Running %s" % update_command)
         result = job.run(update_command, timeout=UPDATE_TIMEOUT)
-        logging.info('Output: %s' % result.stdout)
+        logging.info("Output: %s" % result.stdout)
 
-        logging.info('Rebooting device for update to go live.')
+        logging.info("Rebooting device for update to go live.")
         ota_runner.android_device.reboot(stop_at_lock_screen=True)
-        logging.info('Reboot sent.')
+        logging.info("Reboot sent.")
 
     def __del__(self):
         """Delete the unzipped update_device folder before ACTS exits."""
diff --git a/src/antlion/libs/ota/ota_updater.py b/src/antlion/libs/ota/ota_updater.py
index 1e434b3..6db9649 100644
--- a/src/antlion/libs/ota/ota_updater.py
+++ b/src/antlion/libs/ota/ota_updater.py
@@ -29,16 +29,17 @@
         android_devices: The android_devices in the test.
     """
     for ad in android_devices:
-        ota_runners[ad] = ota_runner_factory.create_from_configs(
-            user_params, ad)
+        ota_runners[ad] = ota_runner_factory.create_from_configs(user_params, ad)
 
 
 def _check_initialization(android_device):
     """Check if a given device was initialized."""
     if android_device not in ota_runners:
-        raise KeyError('Android Device with serial "%s" has not been '
-                       'initialized for OTA Updates. Did you forget to call'
-                       'ota_updater.initialize()?' % android_device.serial)
+        raise KeyError(
+            'Android Device with serial "%s" has not been '
+            "initialized for OTA Updates. Did you forget to call"
+            "ota_updater.initialize()?" % android_device.serial
+        )
 
 
 def update(android_device, ignore_update_errors=False):
@@ -60,8 +61,7 @@
         if ignore_update_errors:
             return
         android_device.log.error(e)
-        android_device.take_bug_report('ota_update',
-                                       utils.get_current_epoch_time())
+        android_device.take_bug_report("ota_update", utils.get_current_epoch_time())
         raise e
 
 
diff --git a/src/antlion/libs/proc/job.py b/src/antlion/libs/proc/job.py
index b17d904..c1cdc24 100644
--- a/src/antlion/libs/proc/job.py
+++ b/src/antlion/libs/proc/job.py
@@ -14,14 +14,9 @@
 
 import logging
 import os
-import sys
+import subprocess
 import time
 
-if os.name == 'posix' and sys.version_info[0] < 3:
-    import subprocess32 as subprocess
-else:
-    import subprocess
-
 
 class Error(Exception):
     """Indicates that a command failed, is fatal to the test unless caught."""
@@ -54,8 +49,9 @@
     def stdout(self):
         """String representation of standard output."""
         if not self._stdout_str:
-            self._stdout_str = self._raw_stdout.decode(encoding=self._encoding,
-                                                       errors='replace')
+            self._stdout_str = self._raw_stdout.decode(
+                encoding=self._encoding, errors="replace"
+            )
             self._stdout_str = self._stdout_str.strip()
         return self._stdout_str
 
@@ -63,19 +59,22 @@
     def stderr(self):
         """String representation of standard error."""
         if not self._stderr_str:
-            self._stderr_str = self._raw_stderr.decode(encoding=self._encoding,
-                                                       errors='replace')
+            self._stderr_str = self._raw_stderr.decode(
+                encoding=self._encoding, errors="replace"
+            )
             self._stderr_str = self._stderr_str.strip()
         return self._stderr_str
 
-    def __init__(self,
-                 command=[],
-                 stdout=bytes(),
-                 stderr=bytes(),
-                 exit_status=None,
-                 duration=0,
-                 did_timeout=False,
-                 encoding='utf-8'):
+    def __init__(
+        self,
+        command=[],
+        stdout=bytes(),
+        stderr=bytes(),
+        exit_status=None,
+        duration=0,
+        did_timeout=False,
+        encoding="utf-8",
+    ):
         """
         Args:
             command: The command that was run. This will be a list containing
@@ -98,18 +97,21 @@
         self.did_timeout = did_timeout
 
     def __repr__(self):
-        return ('job.Result(command=%r, stdout=%r, stderr=%r, exit_status=%r, '
-                'duration=%r, did_timeout=%r, encoding=%r)') % (
-                    self.command, self._raw_stdout, self._raw_stderr,
-                    self.exit_status, self.duration, self.did_timeout,
-                    self._encoding)
+        return (
+            "job.Result(command=%r, stdout=%r, stderr=%r, exit_status=%r, "
+            "duration=%r, did_timeout=%r, encoding=%r)"
+        ) % (
+            self.command,
+            self._raw_stdout,
+            self._raw_stderr,
+            self.exit_status,
+            self.duration,
+            self.did_timeout,
+            self._encoding,
+        )
 
 
-def run(command,
-        timeout=60,
-        ignore_status=False,
-        env=None,
-        io_encoding='utf-8'):
+def run(command, timeout=60, ignore_status=False, env=None, io_encoding="utf-8"):
     """Execute a command in a subproccess and return its output.
 
     Commands can be either shell commands (given as strings) or the
@@ -133,11 +135,13 @@
         Error: When the command had an error executing and ignore_status==False.
     """
     start_time = time.time()
-    proc = subprocess.Popen(command,
-                            env=env,
-                            stdout=subprocess.PIPE,
-                            stderr=subprocess.PIPE,
-                            shell=not isinstance(command, list))
+    proc = subprocess.Popen(
+        command,
+        env=env,
+        stdout=subprocess.PIPE,
+        stderr=subprocess.PIPE,
+        shell=not isinstance(command, list),
+    )
     # Wait on the process terminating
     timed_out = False
     out = bytes()
@@ -149,18 +153,19 @@
         proc.kill()
         proc.wait()
 
-    result = Result(command=command,
-                    stdout=out,
-                    stderr=err,
-                    exit_status=proc.returncode,
-                    duration=time.time() - start_time,
-                    encoding=io_encoding,
-                    did_timeout=timed_out)
+    result = Result(
+        command=command,
+        stdout=out,
+        stderr=err,
+        exit_status=proc.returncode,
+        duration=time.time() - start_time,
+        encoding=io_encoding,
+        did_timeout=timed_out,
+    )
     logging.debug(result)
 
     if timed_out:
-        logging.error("Command %s with %s timeout setting timed out", command,
-                      timeout)
+        logging.error("Command %s with %s timeout setting timed out", command, timeout)
         raise TimeoutError(result)
 
     if not ignore_status and proc.returncode != 0:
@@ -187,11 +192,13 @@
         A subprocess.Popen object representing the created subprocess.
 
     """
-    proc = subprocess.Popen(command,
-                            env=env,
-                            preexec_fn=os.setpgrp,
-                            shell=not isinstance(command, list),
-                            stdout=subprocess.PIPE,
-                            stderr=subprocess.STDOUT)
+    proc = subprocess.Popen(
+        command,
+        env=env,
+        preexec_fn=os.setpgrp,
+        shell=not isinstance(command, list),
+        stdout=subprocess.PIPE,
+        stderr=subprocess.STDOUT,
+    )
     logging.debug("command %s started with pid %s", command, proc.pid)
     return proc
diff --git a/src/antlion/libs/proc/process.py b/src/antlion/libs/proc/process.py
index 906be73..9a3bbcd 100644
--- a/src/antlion/libs/proc/process.py
+++ b/src/antlion/libs/proc/process.py
@@ -23,7 +23,7 @@
 import time
 from threading import Thread
 
-_on_windows = sys.platform == 'win32'
+_on_windows = sys.platform == "win32"
 
 
 class ProcessError(Exception):
@@ -54,23 +54,24 @@
         process, use Process.start().
         """
         # Split command string into list if shell=True is not specified
-        self._use_shell = kwargs.get('shell', False)
+        self._use_shell = kwargs.get("shell", False)
         if not self._use_shell and isinstance(command, str):
             command = shlex.split(command)
         self._command = command
         self._subprocess_kwargs = kwargs
         if _on_windows:
-            self._subprocess_kwargs['creationflags'] = (
-                subprocess.CREATE_NEW_PROCESS_GROUP)
+            self._subprocess_kwargs[
+                "creationflags"
+            ] = subprocess.CREATE_NEW_PROCESS_GROUP
         else:
-            self._subprocess_kwargs['start_new_session'] = True
+            self._subprocess_kwargs["start_new_session"] = True
         self._process = None
 
         self._listening_thread = None
         self._redirection_thread = None
         self._on_output_callback = lambda *args, **kw: None
         self._binary_output = False
-        self._on_terminate_callback = lambda *args, **kw: ''
+        self._on_terminate_callback = lambda *args, **kw: ""
 
         self._started = False
         self._stopped = False
@@ -117,7 +118,7 @@
     def start(self):
         """Starts the process's execution."""
         if self._started:
-            raise ProcessError('Process has already started.')
+            raise ProcessError("Process has already started.")
         self._started = True
         self._process = None
 
@@ -128,13 +129,13 @@
 
         while self._process is None:
             if time.time() > time_up_at:
-                raise OSError('Unable to open process!')
+                raise OSError("Unable to open process!")
 
         self._stopped = False
 
     @staticmethod
     def _get_timeout_left(timeout, start_time):
-        return max(.1, timeout - (time.time() - start_time))
+        return max(0.1, timeout - (time.time() - start_time))
 
     def is_running(self):
         """Checks that the underlying Popen process is still running
@@ -158,7 +159,7 @@
         """Kills the underlying process/process group. Implementation is
         platform-dependent."""
         if _on_windows:
-            subprocess.check_call('taskkill /F /T /PID %s' % self._process.pid)
+            subprocess.check_call("taskkill /F /T /PID %s" % self._process.pid)
         else:
             self.signal(signal.SIGKILL)
 
@@ -175,7 +176,7 @@
             kill_timeout: The amount of time to wait until killing the process.
         """
         if self._stopped:
-            raise ProcessError('Process is already being stopped.')
+            raise ProcessError("Process is already being stopped.")
         self._stopped = True
 
         try:
@@ -193,7 +194,7 @@
             sig: The signal to be sent.
         """
         if _on_windows:
-            raise ProcessError('Unable to call Process.signal on windows.')
+            raise ProcessError("Unable to call Process.signal on windows.")
 
         pgid = os.getpgid(self._process.pid)
         os.killpg(pgid, sig)
@@ -221,8 +222,7 @@
                     self._on_output_callback(data)
         else:
             while True:
-                line = self._process.stdout.readline().decode('utf-8',
-                                                              errors='replace')
+                line = self._process.stdout.readline().decode("utf-8", errors="replace")
 
                 if not line:
                     return
@@ -234,8 +234,7 @@
     def __start_process(command, **kwargs):
         """A convenient wrapper function for starting the process."""
         acts_logger = logging.getLogger()
-        acts_logger.debug(
-            'Starting command "%s" with kwargs %s', command, kwargs)
+        acts_logger.debug('Starting command "%s" with kwargs %s', command, kwargs)
         return subprocess.Popen(command, **kwargs)
 
     def _exec_loop(self):
@@ -249,27 +248,26 @@
         """
         command = self._command
         while True:
-            self._process = self.__start_process(command,
-                                                 stdout=subprocess.PIPE,
-                                                 stderr=subprocess.STDOUT,
-                                                 bufsize=1,
-                                                 **self._subprocess_kwargs)
+            self._process = self.__start_process(
+                command,
+                stdout=subprocess.PIPE,
+                stderr=subprocess.STDOUT,
+                bufsize=1,
+                **self._subprocess_kwargs,
+            )
             self._redirection_thread = Thread(target=self._redirect_output)
             self._redirection_thread.start()
             self._process.wait()
 
             if self._stopped:
-                logging.debug('The process for command %s was stopped.',
-                              command)
+                logging.debug("The process for command %s was stopped.", command)
                 break
             else:
-                logging.debug('The process for command %s terminated.',
-                              command)
+                logging.debug("The process for command %s terminated.", command)
                 # Wait for all output to be processed before sending
                 # _on_terminate_callback()
                 self._redirection_thread.join()
-                logging.debug('Beginning on_terminate_callback for %s.',
-                              command)
+                logging.debug("Beginning on_terminate_callback for %s.", command)
                 retry_value = self._on_terminate_callback(self._process)
                 if retry_value:
                     if not self._use_shell and isinstance(retry_value, str):
diff --git a/src/antlion/libs/test_binding/__init__.py b/src/antlion/libs/test_binding/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/libs/test_binding/__init__.py
+++ /dev/null
diff --git a/src/antlion/libs/test_binding/all_tests_decorator.py b/src/antlion/libs/test_binding/all_tests_decorator.py
deleted file mode 100644
index 906ac6b..0000000
--- a/src/antlion/libs/test_binding/all_tests_decorator.py
+++ /dev/null
@@ -1,27 +0,0 @@
-import inspect
-
-
-def for_all_tests(decorator):
-    """Applies a decorator to all tests within a test class.
-
-    Args:
-        decorator: The decorator to apply.
-
-    Returns:
-        The class decorator function.
-    """
-
-    def _decorate(decorated):
-        test_names = []
-        for name, value in inspect.getmembers(decorated,
-                                              predicate=inspect.isfunction):
-            if name.startswith("test_"):
-                test_names.append(name)
-
-        for test_name in test_names:
-            setattr(decorated, test_name,
-                    decorator(getattr(decorated, test_name)))
-
-        return decorated
-
-    return _decorate
diff --git a/src/antlion/libs/test_binding/binding.py b/src/antlion/libs/test_binding/binding.py
deleted file mode 100644
index df6387b..0000000
--- a/src/antlion/libs/test_binding/binding.py
+++ /dev/null
@@ -1,94 +0,0 @@
-from antlion import signals
-
-
-class Binding(object):
-    """Creates a binding for a test method with a decorator.
-
-    Python stores all functions as a variable bound to an object. When that
-    object is called it will execute the function logic. It is possible to
-    create a wrapper object around the real function object to perform custom
-    logic and store additional meta-data.
-
-    This object acts as a wrapper for test functions. It allows binding
-    additional test logic to a test.
-    """
-
-    def __init__(self, inner, arg_modifier=None, before=None, after=None,
-                 signal_modifier=None, instance_args=None):
-        """
-        Args:
-            inner: The inner method or other binding being bound to.
-            arg_modifier: A function of
-                (*args, **kwargs) => args kwargs that will modify the
-                arguments to pass to the bound target
-            before: A function of (*args, **kwargs) => None that will
-                be called before the bound target.
-            after: A function of (result, *args, **kwargs) => None
-                that will be called after the bound target.
-            signal_modifier:  A function of
-                (signal, *args, **kwargs) => signal that will be
-                called before the signal is sent to modify the signal to send.
-        """
-        self.instance_args = instance_args or []
-        self.arg_modifier = arg_modifier
-        self.signal_modifier = signal_modifier
-        self.after = after
-        self.before = before
-        self.inner = inner
-        self.__name__ = inner.__name__
-
-    def __get__(self, instance, owner):
-        """Called when a new isntance of the test class is created.
-
-        When a new instance of a class is created all method bindings must
-        be bound as instance bindings. This transforms the function call
-        signature to be func(self, *args, **kwargs) to func(*args, **kwargs).
-        The newly created binding handles inserting the self variable so the
-        caller does not have to.
-
-        This binding needs to do similar logic by creating a new binding for
-        the instance that memorizes the instance as a passed in arg.
-        """
-        return Binding(self.inner,
-                       arg_modifier=self.arg_modifier,
-                       before=self.before,
-                       after=self.after,
-                       signal_modifier=self.signal_modifier,
-                       instance_args=[instance] + self.instance_args)
-
-    def __call__(self, *args, **kwargs):
-        """Called when the test is executed."""
-        full_args = self.instance_args + list(args)
-
-        try:
-            if self.arg_modifier:
-                full_args, kwargs = self.arg_modifier(self.inner, *full_args,
-                                                      **kwargs)
-
-            if self.before:
-                self.before(self.inner, *full_args, **kwargs)
-
-            result = 'UNKNOWN ERROR'
-            try:
-                result = self.inner(*full_args, **kwargs)
-            finally:
-                if self.after:
-                    self.after(self.inner, result, *full_args, **kwargs)
-
-            if result or result is None:
-                new_signal = signals.TestPass('')
-            else:
-                new_signal = signals.TestFailure('')
-        except signals.TestSignal as signal:
-            new_signal = signal
-
-        if self.signal_modifier:
-            new_signal = self.signal_modifier(self.inner, new_signal,
-                                              *full_args,
-                                              **kwargs)
-
-        raise new_signal
-
-    def __getattr__(self, item):
-        """A simple pass through for any variable we do not known about."""
-        return getattr(self.inner, item)
diff --git a/src/antlion/libs/uicd/__init__.py b/src/antlion/libs/uicd/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/libs/uicd/__init__.py
+++ /dev/null
diff --git a/src/antlion/libs/uicd/uicd_cli.py b/src/antlion/libs/uicd/uicd_cli.py
deleted file mode 100644
index 3ca9bf4..0000000
--- a/src/antlion/libs/uicd/uicd_cli.py
+++ /dev/null
@@ -1,145 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import os
-import shutil
-import tempfile
-
-from antlion import logger
-from antlion.libs.proc import job
-
-_UICD_JAR_CMD = 'java -jar %s/uicd-commandline.jar'
-_UNZIP_CMD = 'tar -xzf %s -C %s'
-
-
-class UicdError(Exception):
-    """Raised for exceptions that occur in UIConductor-related tasks"""
-
-
-class UicdCli(object):
-    """Provides an interface for running UIConductor (Uicd) workflows under its
-    CLI.
-
-    This class does not handle workflow creation, which requires the Uicd
-    frontend.
-    """
-    def __init__(self, uicd_zip, workflow_paths, log_path=None):
-        """Creates a UicdCli object. Extracts the required uicd-cli binaries.
-
-        Args:
-            uicd_zip: The path to uicd_cli.tar.gz
-            workflow_paths: List of paths to uicd workflows and/or directories
-                containing them.
-            log_path: Directory for storing logs generated by Uicd.
-        """
-        # This is done so unit tests can cache the mocked shutil.rmtree value
-        # and call it on __del__ when the patch has been lifted.
-        self._rm_tmpdir = shutil.rmtree
-
-        self._uicd_zip = uicd_zip[0] if isinstance(uicd_zip, list) else uicd_zip
-        self._uicd_path = tempfile.mkdtemp(prefix='uicd')
-        self._log_path = log_path
-        if self._log_path:
-            os.makedirs(self._log_path, exist_ok=True)
-        self._log = logger.create_tagged_trace_logger(tag='Uicd')
-        self._set_workflows(workflow_paths)
-        self._setup_cli()
-
-    def _set_workflows(self, workflow_paths):
-        """Set up a dictionary that maps workflow name to its file location.
-        This allows the user to specify workflows to run without having to
-        provide the full path.
-
-        Args:
-            workflow_paths: List of paths to uicd workflows and/or directories
-                containing them.
-
-        Raises:
-            UicdError if two or more Uicd workflows share the same file name
-        """
-        if isinstance(workflow_paths, str):
-            workflow_paths = [workflow_paths]
-
-        # get a list of workflow files from specified paths
-        def _raise(e):
-            raise e
-        workflow_files = []
-        for path in workflow_paths:
-            if os.path.isfile(path):
-                workflow_files.append(path)
-            else:
-                for (root, _, files) in os.walk(path, onerror=_raise):
-                    for file in files:
-                        workflow_files.append(os.path.join(root, file))
-
-        # populate the dictionary
-        self._workflows = {}
-        for path in workflow_files:
-            workflow_name = os.path.basename(path)
-            if workflow_name in self._workflows.keys():
-                raise UicdError('Uicd workflows may not share the same name.')
-            self._workflows[workflow_name] = path
-
-    def _setup_cli(self):
-        """Extract tar from uicd_zip and place unzipped files in uicd_path.
-
-        Raises:
-            Exception if the extraction fails.
-        """
-        self._log.debug('Extracting uicd-cli binaries from %s' % self._uicd_zip)
-        unzip_cmd = _UNZIP_CMD % (self._uicd_zip, self._uicd_path)
-        try:
-            job.run(unzip_cmd.split())
-        except job.Error:
-            self._log.exception('Failed to extract uicd-cli binaries.')
-            raise
-
-    def run(self, serial, workflows, timeout=120):
-        """Run specified workflows on the UIConductor CLI.
-
-        Args:
-            serial: Device serial
-            workflows: List or str of workflows to run.
-            timeout: Number seconds to wait for command to finish.
-        """
-        base_cmd = _UICD_JAR_CMD % self._uicd_path
-        if isinstance(workflows, str):
-            workflows = [workflows]
-        for workflow_name in workflows:
-            self._log.info('Running workflow "%s"' % workflow_name)
-            if workflow_name in self._workflows:
-                args = '-d %s -i %s' % (serial, self._workflows[workflow_name])
-            else:
-                self._log.error(
-                    'The workflow "%s" does not exist.' % workflow_name)
-                continue
-            if self._log_path:
-                args = '%s -o %s' % (args, self._log_path)
-            cmd = '%s %s' % (base_cmd, args)
-            try:
-                result = job.run(cmd.split(), timeout=timeout)
-            except job.Error:
-                self._log.exception(
-                    'Failed to run workflow "%s"' % workflow_name)
-                continue
-            if result.stdout:
-                stdout_split = result.stdout.splitlines()
-                if len(stdout_split) > 2:
-                    self._log.debug('Uicd logs stored at %s' % stdout_split[2])
-
-    def __del__(self):
-        """Delete the temp directory to Uicd CLI binaries upon ACTS exit."""
-        self._rm_tmpdir(self._uicd_path)
diff --git a/src/antlion/libs/utils/multithread.py b/src/antlion/libs/utils/multithread.py
deleted file mode 100644
index 31baaf7..0000000
--- a/src/antlion/libs/utils/multithread.py
+++ /dev/null
@@ -1,120 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import concurrent.futures
-import logging
-
-def task_wrapper(task):
-    """Task wrapper for multithread_func
-
-    Args:
-        task[0]: function to be wrapped.
-        task[1]: function args.
-
-    Returns:
-        Return value of wrapped function call.
-    """
-    func = task[0]
-    params = task[1]
-    return func(*params)
-
-
-def run_multithread_func_async(log, task):
-    """Starts a multi-threaded function asynchronously.
-
-    Args:
-        log: log object.
-        task: a task to be executed in parallel.
-
-    Returns:
-        Future object representing the execution of the task.
-    """
-    executor = concurrent.futures.ThreadPoolExecutor(max_workers=1)
-    try:
-        future_object = executor.submit(task_wrapper, task)
-    except Exception as e:
-        log.error("Exception error %s", e)
-        raise
-    return future_object
-
-
-def run_multithread_func(log, tasks):
-    """Run multi-thread functions and return results.
-
-    Args:
-        log: log object.
-        tasks: a list of tasks to be executed in parallel.
-
-    Returns:
-        results for tasks.
-    """
-    MAX_NUMBER_OF_WORKERS = 10
-    number_of_workers = min(MAX_NUMBER_OF_WORKERS, len(tasks))
-    executor = concurrent.futures.ThreadPoolExecutor(
-        max_workers=number_of_workers)
-    if not log: log = logging
-    try:
-        results = list(executor.map(task_wrapper, tasks))
-    except Exception as e:
-        log.error("Exception error %s", e)
-        raise
-    executor.shutdown()
-    if log:
-        log.info("multithread_func %s result: %s",
-                 [task[0].__name__ for task in tasks], results)
-    return results
-
-
-def multithread_func(log, tasks):
-    """Multi-thread function wrapper.
-
-    Args:
-        log: log object.
-        tasks: tasks to be executed in parallel.
-
-    Returns:
-        True if all tasks return True.
-        False if any task return False.
-    """
-    results = run_multithread_func(log, tasks)
-    for r in results:
-        if not r:
-            return False
-    return True
-
-
-def multithread_func_and_check_results(log, tasks, expected_results):
-    """Multi-thread function wrapper.
-
-    Args:
-        log: log object.
-        tasks: tasks to be executed in parallel.
-        expected_results: check if the results from tasks match expected_results.
-
-    Returns:
-        True if expected_results are met.
-        False if expected_results are not met.
-    """
-    return_value = True
-    results = run_multithread_func(log, tasks)
-    log.info("multithread_func result: %s, expecting %s", results,
-             expected_results)
-    for task, result, expected_result in zip(tasks, results, expected_results):
-        if result != expected_result:
-            logging.info("Result for task %s is %s, expecting %s", task[0],
-                         result, expected_result)
-            return_value = False
-    return return_value
diff --git a/src/antlion/libs/utils/timer.py b/src/antlion/libs/utils/timer.py
deleted file mode 100644
index 2350aa9..0000000
--- a/src/antlion/libs/utils/timer.py
+++ /dev/null
@@ -1,132 +0,0 @@
-"""A simple timer class to keep record of the elapsed time."""
-
-import time
-
-
-class TimeRecorder(object):
-    """Main class to keep time records.
-
-    A timer record contains an ID, a start timestamp, and an optional stop
-    timestamps. The elapsed time calculated as stop - start.
-    If the stop timestamp is not set, current system time will be used.
-
-    Example usage:
-    >>> timer = TimeRecorder()
-    >>> # start a single timer, ID = 'lunch'
-    >>> timer.start_timer('lunch')
-    >>> # start two timers at the same time
-    >>> timer.start_timer(['salad', 'dessert'])
-    >>> # stop a single timer
-    >>> timer.stop_timer('salad')
-    >>> # get elapsed time of all timers
-    >>> timer.elapsed()
-    """
-
-    def __init__(self):
-        self.recorder = dict()
-
-    def start_timer(self, record_ids='Default', force=False):
-        """Start one or more timer.
-
-        Starts one or more timer at current system time with the record ID
-        specified in record_ids. Will overwrite/restart existing timer.
-
-        Args:
-            record_ids: timer record IDs. Can be a string or a list of strings.
-                        If the record ID is a list, will start multiple timers
-                        at the same time.
-            force: Force update the timer's start time if the specified timer
-                   has already started. By default we won't update started timer
-                   again.
-
-        Returns:
-            Number of timer started.
-        """
-        if isinstance(record_ids, str):
-            record_ids = [record_ids]
-        start_time = time.time()
-        for rec in record_ids:
-            if force or rec not in self.recorder:
-                self.recorder[rec] = [start_time, None]
-        return len(record_ids)
-
-    def stop_timer(self, record_ids=None, force=False):
-        """Stop one or more timer.
-
-        Stops one or more timer at current system time.
-
-        Args:
-            record_ids: timer record IDs. Can be a string or a list of strings.
-                        If the record ID is a list, will stop multiple timers at
-                        the same time. By default, it will stop all timers.
-            force: Force update the timer's stop time if the specified timer has
-                   already stopped. By default we won't update stopped timer
-                   again.
-
-        Returns:
-            Number of timer stopped.
-        """
-        # stop all record if id is not provided.
-        if record_ids is None:
-            record_ids = self.recorder.keys()
-        elif isinstance(record_ids, str):
-            record_ids = [record_ids]
-        stop_time = time.time()
-        num_rec = 0
-        for rec in record_ids:
-            if rec in self.recorder:
-                if force or self.recorder[rec][1] is None:
-                    self.recorder[rec][1] = stop_time
-                    num_rec += 1
-        return num_rec
-
-    def elapsed(self, record_ids=None):
-        """Return elapsed time in seconds.
-
-        For records with no stop time, will calculate based on the current
-        system time.
-
-        Args:
-            record_ids: timer record IDs. Can be a string or a list of strings.
-                        If the record ID is a list, will compute the elapsed
-                        time for all specified timers. Default value (None)
-                        calculates elapsed time for all existing timers.
-
-        Returns:
-            The elapsed time. If the record_ids is a string, will return the
-            time in seconds as float type. If the record_ids is a list or
-            default (None), will return a dict of the <record id, elapsed time>.
-        """
-        single_record = False
-        if record_ids is None:
-            record_ids = self.recorder.keys()
-        elif isinstance(record_ids, str):
-            record_ids = [record_ids]
-            single_record = True
-        results = dict()
-        curr_time = time.time()
-        for rec in record_ids:
-            if rec in self.recorder:
-                if self.recorder[rec][1] is not None:
-                    results[rec] = self.recorder[rec][1] - self.recorder[rec][0]
-                else:
-                    results[rec] = curr_time - self.recorder[rec][0]
-        if not results:  # no valid record found
-            return None
-        elif single_record and len(record_ids) == 1:
-            # only 1 record is requested, return results directly
-            return results[record_ids[0]]
-        else:
-            return results  # multiple records, return a dict.
-
-    def clear(self, record_ids=None):
-        """Clear existing time records."""
-        if record_ids is None:
-            self.recorder = dict()
-            return
-
-        if isinstance(record_ids, str):
-            record_ids = [record_ids]
-        for rec in record_ids:
-            if rec in self.recorder:
-                del self.recorder[rec]
diff --git a/src/antlion/libs/version_selector.py b/src/antlion/libs/version_selector.py
deleted file mode 100644
index 2e55eb5..0000000
--- a/src/antlion/libs/version_selector.py
+++ /dev/null
@@ -1,291 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import bisect
-from collections import namedtuple
-import inspect
-import numbers
-
-
-def _fully_qualified_name(func):
-    """Returns the fully-qualified name of a function.
-
-    Note: __qualname__ is not the fully qualified name. It is the the fully
-          qualified name without the module name.
-
-    See: https://www.python.org/dev/peps/pep-3155/#naming-choice
-    """
-    return '%s:%s' % (func.__module__, func.__qualname__)
-
-
-_FrameInfo = namedtuple('_FrameInfo', ['frame', 'filename', 'lineno',
-                                       'function', 'code_context', 'index'])
-
-
-def _inspect_stack():
-    """Returns named tuple for each tuple returned by inspect.stack().
-
-    For Python3.4 and earlier, which returns unnamed tuples for inspect.stack().
-
-    Returns:
-        list of _FrameInfo named tuples representing stack frame info.
-    """
-    return [_FrameInfo(*info) for info in inspect.stack()]
-
-
-def set_version(get_version_func, min_version, max_version):
-    """Returns a decorator returning a VersionSelector containing all versions
-    of the decorated func.
-
-    Args:
-        get_version_func: The lambda that returns the version level based on the
-                          arguments sent to versioned_func
-        min_version: The minimum API level for calling versioned_func.
-        max_version: The maximum API level for calling versioned_func.
-
-    Raises:
-        SyntaxError if get_version_func is different between versioned funcs.
-
-    Returns:
-        A VersionSelector containing all versioned calls to the decorated func.
-    """
-    func_owner_variables = None
-    for frame_info in _inspect_stack():
-        if frame_info.function == '<module>':
-            # We've reached the end of the most recently imported module in our
-            # stack without finding a class first. This indicates that the
-            # decorator is on a module-level function.
-            func_owner_variables = frame_info.frame.f_locals
-            break
-        elif '__qualname__' in frame_info.frame.f_locals:
-            # __qualname__ appears in stack frames of objects that have
-            # yet to be interpreted. Here we can guarantee that the object in
-            # question is the innermost class that contains the function.
-            func_owner_variables = frame_info.frame.f_locals
-            break
-
-    def decorator(func):
-        if isinstance(func, (staticmethod, classmethod)):
-            raise SyntaxError('@staticmethod and @classmethod decorators must '
-                              'be placed before the versioning decorator.')
-        func_name = func.__name__
-
-        if func_name in func_owner_variables:
-            # If the function already exists within the class/module, get it.
-            version_selector = func_owner_variables[func_name]
-            if isinstance(version_selector, (staticmethod, classmethod)):
-                # If the function was also decorated with @staticmethod or
-                # @classmethod, the version_selector will be stored in __func__.
-                version_selector = version_selector.__func__
-            if not isinstance(version_selector, _VersionSelector):
-                raise SyntaxError('The previously defined function "%s" is not '
-                                  'decorated with a versioning decorator.' %
-                                  version_selector.__qualname__)
-            if (version_selector.comparison_func_name !=
-                    _fully_qualified_name(get_version_func)):
-                raise SyntaxError('Functions of the same name must be decorated'
-                                  ' with the same versioning decorator.')
-        else:
-            version_selector = _VersionSelector(get_version_func)
-
-        version_selector.add_fn(func, min_version, max_version)
-        return version_selector
-
-    return decorator
-
-
-class _VersionSelector(object):
-    """A class that maps API levels to versioned functions for that API level.
-
-    Attributes:
-        entry_list: A sorted list of Entries that define which functions to call
-                    for a given API level.
-    """
-
-    class ListWrap(object):
-        """This class wraps a list of VersionSelector.Entry objects.
-
-        This is required to make the bisect functions work, since the underlying
-        implementation of those functions do not use __cmp__, __lt__, __gt__,
-        etc. because they are not implemented in Python.
-
-        See: https://docs.python.org/3/library/bisect.html#other-examples
-        """
-
-        def __init__(self, entry_list):
-            self.list = entry_list
-
-        def __len__(self):
-            return len(self.list)
-
-        def __getitem__(self, index):
-            return self.list[index].level
-
-    class Entry(object):
-        def __init__(self, level, func, direction):
-            """Creates an Entry object.
-
-            Args:
-                level: The API level for this point.
-                func: The function to call.
-                direction: (-1, 0 or 1) the  direction the ray from this level
-                           points towards.
-            """
-            self.level = level
-            self.func = func
-            self.direction = direction
-
-    def __init__(self, version_func):
-        """Creates a VersionSelector object.
-
-        Args:
-            version_func: The function that converts the arguments into an
-                          integer that represents the API level.
-        """
-        self.entry_list = list()
-        self.get_version = version_func
-        self.instance = None
-        self.comparison_func_name = _fully_qualified_name(version_func)
-
-    def __name__(self):
-        if len(self.entry_list) > 0:
-            return self.entry_list[0].func.__name__
-        return '%s<%s>' % (self.__class__.__name__, self.get_version.__name__)
-
-    def print_ranges(self):
-        """Returns all ranges as a string.
-
-        The string is formatted as '[min_a, max_a], [min_b, max_b], ...'
-        """
-        ranges = []
-        min_boundary = None
-        for entry in self.entry_list:
-            if entry.direction == 1:
-                min_boundary = entry.level
-            elif entry.direction == 0:
-                ranges.append(str([entry.level, entry.level]))
-            else:
-                ranges.append(str([min_boundary, entry.level]))
-        return ', '.join(ranges)
-
-    def add_fn(self, fn, min_version, max_version):
-        """Adds a function to the VersionSelector for the given API range.
-
-        Args:
-            fn: The function to call when the API level is met.
-            min_version: The minimum version level for calling this function.
-            max_version: The maximum version level for calling this function.
-
-        Raises:
-            ValueError if min_version > max_version or another versioned
-                       function overlaps this new range.
-        """
-        if min_version > max_version:
-            raise ValueError('The minimum API level must be greater than the'
-                             'maximum API level.')
-        insertion_index = bisect.bisect_left(
-            _VersionSelector.ListWrap(self.entry_list), min_version)
-        if insertion_index != len(self.entry_list):
-            right_neighbor = self.entry_list[insertion_index]
-            if not (min_version <= max_version < right_neighbor.level and
-                    right_neighbor.direction != -1):
-                raise ValueError('New range overlaps another API level. '
-                                 'New range: %s, Existing ranges: %s' %
-                                 ([min_version, max_version],
-                                  self.print_ranges()))
-        if min_version == max_version:
-            new_entry = _VersionSelector.Entry(min_version, fn, direction=0)
-            self.entry_list.insert(insertion_index, new_entry)
-        else:
-            # Inserts the 2 entries into the entry list at insertion_index.
-            self.entry_list[insertion_index:insertion_index] = [
-                _VersionSelector.Entry(min_version, fn, direction=1),
-                _VersionSelector.Entry(max_version, fn, direction=-1)]
-
-    def __call__(self, *args, **kwargs):
-        """Calls the proper versioned function for the given API level.
-
-        This is a magic python function that gets called whenever parentheses
-        immediately follow the attribute access (e.g. obj.version_selector()).
-
-        Args:
-            *args, **kwargs: The arguments passed into this call. These
-                             arguments are intended for the decorated function.
-
-        Returns:
-            The result of the called function.
-        """
-        if self.instance is not None:
-            # When the versioned function is a classmethod, the class is passed
-            # into __call__ as the first argument.
-            level = self.get_version(self.instance, *args, **kwargs)
-        else:
-            level = self.get_version(*args, **kwargs)
-        if not isinstance(level, numbers.Number):
-            kwargs_out = []
-            for key, value in kwargs.items():
-                kwargs_out.append('%s=%s' % (key, str(value)))
-            args_out = str(list(args))[1:-1]
-            kwargs_out = ', '.join(kwargs_out)
-            raise ValueError(
-                'The API level the function %s returned %s for the arguments '
-                '(%s). This function must return a number.' %
-                (self.get_version.__qualname__, repr(level),
-                 ', '.join(i for i in [args_out, kwargs_out] if i)))
-
-        index = bisect.bisect_left(_VersionSelector.ListWrap(self.entry_list),
-                                   level)
-
-        # Check to make sure the function being called is within the API range
-        if index == len(self.entry_list):
-            raise NotImplementedError('No function %s exists for API level %s'
-                                      % (self.entry_list[0].func.__qualname__,
-                                         level))
-        closest_entry = self.entry_list[index]
-        if (closest_entry.direction == 0 and closest_entry.level != level or
-                closest_entry.direction == 1 and closest_entry.level > level or
-                closest_entry.direction == -1 and closest_entry.level < level):
-            raise NotImplementedError('No function %s exists for API level %s'
-                                      % (self.entry_list[0].func.__qualname__,
-                                         level))
-
-        func = self.entry_list[index].func
-        if self.instance is None:
-            # __get__ was not called, so the function is module-level.
-            return func(*args, **kwargs)
-
-        return func(self.instance, *args, **kwargs)
-
-    def __get__(self, instance, owner):
-        """Gets the instance and owner whenever this function is obtained.
-
-        These arguments will be used to pass in the self to instance methods.
-        If the function is marked with @staticmethod or @classmethod, those
-        decorators will handle removing self or getting the class, respectively.
-
-        Note that this function will NOT be called on module-level functions.
-
-        Args:
-            instance: The instance of the object this function is being called
-                      from. If this function is static or a classmethod,
-                      instance will be None.
-            owner: The object that owns this function. This is the class object
-                   that defines the function.
-
-        Returns:
-            self, this VersionSelector instance.
-        """
-        self.instance = instance
-        return self
diff --git a/src/antlion/libs/yaml_writer.py b/src/antlion/libs/yaml_writer.py
index 8c710e1..33c349f 100644
--- a/src/antlion/libs/yaml_writer.py
+++ b/src/antlion/libs/yaml_writer.py
@@ -18,25 +18,27 @@
 import yaml
 
 # Allow yaml to dump OrderedDict
-yaml.add_representer(collections.OrderedDict,
-                     lambda dumper, data: dumper.represent_dict(data),
-                     Dumper=yaml.SafeDumper)
+yaml.add_representer(
+    collections.OrderedDict,
+    lambda dumper, data: dumper.represent_dict(data),
+    Dumper=yaml.SafeDumper,
+)
 
 
 def _str_representer(dumper, data):
     if len(data.splitlines()) > 1:
-        data = '\n'.join(line.replace('\t', '    ').rstrip()
-                         for line in data.splitlines())
-        return dumper.represent_scalar('tag:yaml.org,2002:str', data, style='|')
-    return dumper.represent_scalar('tag:yaml.org,2002:str', data)
+        data = "\n".join(
+            line.replace("\t", "    ").rstrip() for line in data.splitlines()
+        )
+        return dumper.represent_scalar("tag:yaml.org,2002:str", data, style="|")
+    return dumper.represent_scalar("tag:yaml.org,2002:str", data)
 
 
 # Automatically convert multiline strings into block literals
 yaml.add_representer(str, _str_representer, Dumper=yaml.SafeDumper)
 
-
 _DUMP_KWARGS = dict(explicit_start=True, allow_unicode=True, indent=4)
-if yaml.__version__ >= '5.1':
+if yaml.__version__ >= "5.1":
     _DUMP_KWARGS.update(sort_keys=False)
 
 
diff --git a/src/antlion/logger.py b/src/antlion/logger.py
index 599e08b..1d18ad8 100755
--- a/src/antlion/logger.py
+++ b/src/antlion/logger.py
@@ -84,8 +84,8 @@
     def format(self, record):
         colored_record = copy(record)
         level_name = colored_record.levelname
-        style = LOG_LEVELS[level_name]['style']
-        formatted_level_name = '%s%s%s' % (style, level_name, Style.RESET)
+        style = LOG_LEVELS[level_name]["style"]
+        formatted_level_name = "%s%s%s" % (style, level_name, Style.RESET)
         colored_record.levelname = formatted_level_name
         return super().format(colored_record)
 
@@ -100,10 +100,10 @@
         An iterable of date and time elements in the order of month, day, hour,
         minute, second, microsecond.
     """
-    date, time = t.split(' ')
-    year, month, day = date.split('-')
-    h, m, s = time.split(':')
-    s, ms = s.split('.')
+    date, time = t.split(" ")
+    year, month, day = date.split("-")
+    h, m, s = time.split(":")
+    s, ms = s.split(".")
     return year, month, day, h, m, s, ms
 
 
@@ -201,19 +201,20 @@
     logging.log_path = log_path
     log_styles = [
         LogStyles.LOG_INFO + LogStyles.TO_STDOUT,
-        LogStyles.DEFAULT_LEVELS + LogStyles.TESTCASE_LOG
+        LogStyles.DEFAULT_LEVELS + LogStyles.TESTCASE_LOG,
     ]
     terminal_format = log_line_format
     if prefix:
         terminal_format = "[{}] {}".format(prefix, log_line_format)
-    stream_formatter = ColoredLogFormatter(terminal_format,
-                                           log_line_time_format)
+    stream_formatter = ColoredLogFormatter(terminal_format, log_line_time_format)
     file_formatter = logging.Formatter(log_line_format, log_line_time_format)
-    log = log_stream.create_logger('test_run',
-                                   '',
-                                   log_styles=log_styles,
-                                   stream_format=stream_formatter,
-                                   file_format=file_formatter)
+    log = log_stream.create_logger(
+        "test_run",
+        "",
+        log_styles=log_styles,
+        stream_format=stream_formatter,
+        file_format=file_formatter,
+    )
     log.setLevel(logging.DEBUG)
     _enable_additional_log_levels()
 
@@ -221,7 +222,7 @@
 def _enable_additional_log_levels():
     """Enables logging levels used for tracing tests and debugging devices."""
     for log_type, log_data in LOG_LEVELS.items():
-        logging.addLevelName(log_data['level'], log_type)
+        logging.addLevelName(log_data["level"], log_type)
 
 
 def kill_test_logger(logger):
@@ -248,8 +249,7 @@
     try:
         os.symlink(actual_path, link_path)
     except OSError:
-        logging.warning('Failed to create symlink to latest logs dir.',
-                        exc_info=True)
+        logging.warning("Failed to create symlink to latest logs dir.", exc_info=True)
 
 
 def setup_test_logger(log_path, prefix=None):
@@ -277,13 +277,14 @@
         A string representing the same time as input timestamp, but without
         special characters.
     """
-    norm_tp = log_line_timestamp.replace(' ', '_')
-    norm_tp = norm_tp.replace(':', '-')
+    norm_tp = log_line_timestamp.replace(" ", "_")
+    norm_tp = norm_tp.replace(":", "-")
     return norm_tp
 
 
 class LoggerAdapter(logging.LoggerAdapter):
     """A LoggerAdapter class that takes in a lambda for transforming logs."""
+
     def __init__(self, logging_lambda):
         self.logging_lambda = logging_lambda
         super(LoggerAdapter, self).__init__(logging.getLogger(), {})
@@ -302,7 +303,7 @@
     return tracelogger.TraceLogger(LoggerAdapter(logging_lambda))
 
 
-def create_tagged_trace_logger(tag=''):
+def create_tagged_trace_logger(tag=""):
     """Returns a logger that logs each line with the given prefix.
 
     Args:
@@ -311,7 +312,8 @@
 
             <TESTBED> <TIME> <LOG_LEVEL> [tag123] logged message
     """
+
     def logging_lambda(msg):
-        return '[%s] %s' % (tag, msg)
+        return "[%s] %s" % (tag, msg)
 
     return create_logger(logging_lambda)
diff --git a/src/antlion/net.py b/src/antlion/net.py
new file mode 100644
index 0000000..6f56703
--- /dev/null
+++ b/src/antlion/net.py
@@ -0,0 +1,67 @@
+#!/usr/bin/env python3
+#
+# Copyright 2023 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import errno
+import time
+import socket
+
+from typing import Optional
+
+
+def wait_for_port(host: str, port: int, timeout_sec: int = 5) -> None:
+    """Wait for the host to start accepting connections on the port.
+
+    Some services take some time to start. Call this after launching the service
+    to avoid race conditions.
+
+    Args:
+        host: IP of the running service.
+        port: Port of the running service.
+        timeout_sec: Seconds to wait until raising TimeoutError.
+
+    Raises:
+        TimeoutError: when timeout_sec has expired without a successful
+            connection to the service
+    """
+    last_error: Optional[OSError] = None
+    timeout = time.perf_counter() + timeout_sec
+
+    while True:
+        try:
+            time_left = max(timeout - time.perf_counter(), 0)
+            with socket.create_connection((host, port), timeout=time_left):
+                return
+        except ConnectionRefusedError as e:
+            # Occurs when the host is online but not ready to accept connections
+            # yet; wait to see if the host becomes ready.
+            last_error = e
+        except socket.timeout as e:
+            # socket.timeout was aliased to TimeoutError in Python 3.10.
+            last_error = e
+        except OSError as e:
+            if e.errno == errno.EHOSTUNREACH:
+                # No route to host. Occurs when the interface to the host is
+                # torn down; wait to see if the interface comes back.
+                last_error = e
+            else:
+                # Unexpected error
+                raise e
+
+        if time.perf_counter() >= timeout:
+            raise TimeoutError(
+                f"Waited over {timeout_sec}s for the service to start "
+                f"accepting connections at {host}:{port}"
+            ) from last_error
diff --git a/src/antlion/records.py b/src/antlion/records.py
index aee2385..1c7ad23 100644
--- a/src/antlion/records.py
+++ b/src/antlion/records.py
@@ -44,15 +44,15 @@
         See MoblyTestSummaryWriter.dump for documentation.
         """
         new_content = collections.OrderedDict(copy.deepcopy(content))
-        new_content['Type'] = entry_type.value
-        new_content.move_to_end('Type', last=False)
+        new_content["Type"] = entry_type.value
+        new_content.move_to_end("Type", last=False)
         # Both user code and Mobly code can trigger this dump, hence the lock.
         with self._lock:
             # For Python3, setting the encoding on yaml.safe_dump does not work
             # because Python3 file descriptors set an encoding by default, which
             # PyYAML uses instead of the encoding on yaml.safe_dump. So, the
             # encoding has to be set on the open call instead.
-            with io.open(self._path, 'a', encoding='utf-8') as f:
+            with io.open(self._path, "a", encoding="utf-8") as f:
                 # Use safe_dump here to avoid language-specific tags in final
                 # output.
                 yaml_writer.safe_dump(new_content, f)
@@ -95,8 +95,7 @@
         Sets the begin_time of this record.
         """
         super().test_begin()
-        self.log_begin_time = logger.epoch_to_log_line_timestamp(
-            self.begin_time)
+        self.log_begin_time = logger.epoch_to_log_line_timestamp(self.begin_time)
 
     def _test_end(self, result, e):
         """Class internal function to signal the end of a test case execution.
@@ -109,8 +108,7 @@
         """
         super()._test_end(result, e)
         if self.end_time:
-            self.log_end_time = logger.epoch_to_log_line_timestamp(
-                self.end_time)
+            self.log_end_time = logger.epoch_to_log_line_timestamp(self.end_time)
 
     def to_dict(self):
         """Gets a dictionary representing the content of this class.
@@ -130,8 +128,7 @@
         d[TestResultEnums.RECORD_EXTRAS] = self.extras
         d[TestResultEnums.RECORD_DETAILS] = self.details
         d[TestResultEnums.RECORD_EXTRA_ERRORS] = {
-            key: value.to_dict()
-            for (key, value) in self.extra_errors.items()
+            key: value.to_dict() for (key, value) in self.extra_errors.items()
         }
         d[TestResultEnums.RECORD_STACKTRACE] = self.stacktrace
         return d
@@ -181,8 +178,7 @@
             A TestResult instance that's the sum of two TestResult instances.
         """
         if not isinstance(r, MoblyTestResult):
-            raise TypeError("Operand %s of type %s is not a TestResult." %
-                            (r, type(r)))
+            raise TypeError("Operand %s of type %s is not a TestResult." % (r, type(r)))
         sum_result = TestResult()
         for name in sum_result.__dict__:
             r_value = getattr(r, name)
@@ -208,8 +204,10 @@
             A json-format string representing the test results.
         """
         d = collections.OrderedDict()
-        d["ControllerInfo"] = {record.controller_name: record.controller_info
-                               for record in self.controller_info}
+        d["ControllerInfo"] = {
+            record.controller_name: record.controller_info
+            for record in self.controller_info
+        }
         d["Results"] = [record.to_dict() for record in self.executed]
         d["Summary"] = self.summary_dict()
         d["Error"] = self.errors_list()
@@ -236,12 +234,13 @@
         l = list()
         for record in self.error:
             if isinstance(record, TestResultRecord):
-                keys = [TestResultEnums.RECORD_NAME,
-                        TestResultEnums.RECORD_DETAILS,
-                        TestResultEnums.RECORD_EXTRA_ERRORS]
+                keys = [
+                    TestResultEnums.RECORD_NAME,
+                    TestResultEnums.RECORD_DETAILS,
+                    TestResultEnums.RECORD_EXTRA_ERRORS,
+                ]
             elif isinstance(record, ExceptionRecord):
-                keys = [TestResultEnums.RECORD_DETAILS,
-                        TestResultEnums.RECORD_POSITION]
+                keys = [TestResultEnums.RECORD_DETAILS, TestResultEnums.RECORD_POSITION]
             else:
                 return []
             l.append({k: record.to_dict()[k] for k in keys})
diff --git a/src/antlion/test_decorators.py b/src/antlion/test_decorators.py
index 2ec1835..a152f4f 100644
--- a/src/antlion/test_decorators.py
+++ b/src/antlion/test_decorators.py
@@ -21,8 +21,7 @@
     return test_signals[-1]
 
 
-def repeated_test(num_passes, acceptable_failures=0,
-                  result_selector=__select_last):
+def repeated_test(num_passes, acceptable_failures=0, result_selector=__select_last):
     """A decorator that runs a test case multiple times.
 
     This decorator can be used to run a test multiple times and aggregate the
@@ -49,8 +48,9 @@
             returns the test signal to report the test case as. Note that the
             list also contains any uncaught exceptions from the test execution.
     """
+
     def decorator(func):
-        if not func.__name__.startswith('test_'):
+        if not func.__name__.startswith("test_"):
             raise ValueError('Tests must start with "test_".')
 
         def test_wrapper(self):
@@ -60,8 +60,11 @@
             for i in range(num_passes + acceptable_failures):
                 try:
                     func(self, i + 1)
-                except (signals.TestFailure, signals.TestError,
-                        AssertionError) as signal:
+                except (
+                    signals.TestFailure,
+                    signals.TestError,
+                    AssertionError,
+                ) as signal:
                     test_signals_received.append(signal)
                     num_failures += 1
                 except signals.TestPass as signal:
@@ -74,9 +77,12 @@
                     num_failures += 1
                 else:
                     num_seen_passes += 1
-                    test_signals_received.append(signals.TestPass(
-                        'Test iteration %s of %s passed without details.' % (
-                        i, func.__name__)))
+                    test_signals_received.append(
+                        signals.TestPass(
+                            "Test iteration %s of %s passed without details."
+                            % (i, func.__name__)
+                        )
+                    )
 
                 if num_failures > acceptable_failures:
                     break
diff --git a/src/antlion/test_runner.py b/src/antlion/test_runner.py
index 261d0bd..bcb516f 100644
--- a/src/antlion/test_runner.py
+++ b/src/antlion/test_runner.py
@@ -44,14 +44,16 @@
         The test class in the test module.
     """
     test_classes = []
-    main_module_members = sys.modules['__main__']
+    main_module_members = sys.modules["__main__"]
     for _, module_member in main_module_members.__dict__.items():
         if inspect.isclass(module_member):
             if issubclass(module_member, base_test.BaseTestClass):
                 test_classes.append(module_member)
     if len(test_classes) != 1:
-        logging.error('Expected 1 test class per file, found %s.',
-                      [t.__name__ for t in test_classes])
+        logging.error(
+            "Expected 1 test class per file, found %s.",
+            [t.__name__ for t in test_classes],
+        )
         sys.exit(1)
     return test_classes[0]
 
@@ -82,7 +84,7 @@
     except signals.TestAbortAll:
         raise
     except:
-        logging.exception('Exception when executing %s.', tr.testbed_name)
+        logging.exception("Exception when executing %s.", tr.testbed_name)
     finally:
         tr.stop()
 
@@ -107,14 +109,15 @@
         self.test_run_config = test_configs
         self.testbed_name = self.test_run_config.testbed_name
         start_time = logger.get_log_file_timestamp()
-        self.id = '{}@{}'.format(self.testbed_name, start_time)
+        self.id = "{}@{}".format(self.testbed_name, start_time)
         self.test_run_config.log_path = os.path.abspath(
-            os.path.join(self.test_run_config.log_path, self.testbed_name,
-                         start_time))
+            os.path.join(self.test_run_config.log_path, self.testbed_name, start_time)
+        )
         logger.setup_test_logger(self.log_path, self.testbed_name)
         self.log = logging.getLogger()
         self.test_run_config.summary_writer = records.TestSummaryWriter(
-            os.path.join(self.log_path, records.OUTPUT_FILE_SUMMARY))
+            os.path.join(self.log_path, records.OUTPUT_FILE_SUMMARY)
+        )
         self.run_list = run_list
         self.dump_config()
         self.results = records.TestResult()
@@ -147,8 +150,8 @@
         """
 
         def is_testfile_name(name, ext):
-            if ext == '.py':
-                if name.endswith('Test') or name.endswith('_test'):
+            if ext == ".py":
+                if name.endswith("Test") or name.endswith("_test"):
                     return True
             return False
 
@@ -157,20 +160,20 @@
         for path, name, _ in file_list:
             sys.path.append(path)
             try:
-                with utils.SuppressLogOutput(
-                        log_levels=[logging.INFO, logging.ERROR]):
+                with utils.SuppressLogOutput(log_levels=[logging.INFO, logging.ERROR]):
                     module = importlib.import_module(name)
             except Exception as e:
-                logging.debug('Failed to import %s: %s', path, str(e))
+                logging.debug("Failed to import %s: %s", path, str(e))
                 for test_cls_name, _ in self.run_list:
-                    alt_name = name.replace('_', '').lower()
+                    alt_name = name.replace("_", "").lower()
                     alt_cls_name = test_cls_name.lower()
                     # Only block if a test class on the run list causes an
                     # import error. We need to check against both naming
                     # conventions: AaaBbb and aaa_bbb.
                     if name == test_cls_name or alt_name == alt_cls_name:
-                        msg = ('Encountered error importing test class %s, '
-                               'abort.') % test_cls_name
+                        msg = (
+                            "Encountered error importing test class %s, " "abort."
+                        ) % test_cls_name
                         # This exception is logged here to help with debugging
                         # under py2, because "raise X from Y" syntax is only
                         # supported under py3.
@@ -178,8 +181,8 @@
                         raise ValueError(msg)
                 continue
             for member_name in dir(module):
-                if not member_name.startswith('__'):
-                    if member_name.endswith('Test'):
+                if not member_name.startswith("__"):
+                    if member_name.endswith("Test"):
                         test_class = getattr(module, member_name)
                         if inspect.isclass(test_class):
                             test_classes[member_name] = test_class
@@ -203,15 +206,17 @@
         matches = fnmatch.filter(self.test_classes.keys(), test_cls_name)
         if not matches:
             self.log.info(
-                'Cannot find test class %s or classes matching pattern, '
-                'skipping for now.' % test_cls_name)
-            record = records.TestResultRecord('*all*', test_cls_name)
-            record.test_skip(signals.TestSkip('Test class does not exist.'))
+                "Cannot find test class %s or classes matching pattern, "
+                "skipping for now." % test_cls_name
+            )
+            record = records.TestResultRecord("*all*", test_cls_name)
+            record.test_skip(signals.TestSkip("Test class does not exist."))
             self.results.add_record(record)
             return
         if matches != [test_cls_name]:
-            self.log.info('Found classes matching pattern %s: %s',
-                          test_cls_name, matches)
+            self.log.info(
+                "Found classes matching pattern %s: %s", test_cls_name, matches
+            )
 
         for test_cls_name_match in matches:
             test_cls = self.test_classes[test_cls_name_match]
@@ -245,27 +250,30 @@
             self.test_classes = {test_class.__name__: test_class}
         else:
             t_paths = self.test_run_config.controller_configs[
-                keys.Config.key_test_paths.value]
+                keys.Config.key_test_paths.value
+            ]
             self.test_classes = self.import_test_modules(t_paths)
-        self.log.debug('Executing run list %s.', self.run_list)
+        self.log.debug("Executing run list %s.", self.run_list)
         for test_cls_name, test_case_names in self.run_list:
             if not self.running:
                 break
 
             if test_case_names:
-                self.log.debug('Executing test cases %s in test class %s.',
-                               test_case_names, test_cls_name)
+                self.log.debug(
+                    "Executing test cases %s in test class %s.",
+                    test_case_names,
+                    test_cls_name,
+                )
             else:
-                self.log.debug('Executing test class %s', test_cls_name)
+                self.log.debug("Executing test class %s", test_cls_name)
 
             try:
                 self.run_test_class(test_cls_name, test_case_names)
             except error.ActsError as e:
                 self.results.error.append(ExceptionRecord(e))
-                self.log.error('Test Runner Error: %s' % e.details)
+                self.log.error("Test Runner Error: %s" % e.details)
             except signals.TestAbortAll as e:
-                self.log.warning(
-                    'Abort all subsequent test classes. Reason: %s', e)
+                self.log.warning("Abort all subsequent test classes. Reason: %s", e)
                 raise
 
     def stop(self):
@@ -275,8 +283,10 @@
         This function concludes a test run and writes out a test report.
         """
         if self.running:
-            msg = '\nSummary for test run %s: %s\n' % (
-                self.id, self.results.summary_str())
+            msg = "\nSummary for test run %s: %s\n" % (
+                self.id,
+                self.results.summary_str(),
+            )
             self._write_results_to_file()
             self.log.info(msg.strip())
             logger.kill_test_logger(self.log)
@@ -285,29 +295,34 @@
     def _write_results_to_file(self):
         """Writes test results to file(s) in a serializable format."""
         # Old JSON format
-        path = os.path.join(self.log_path, 'test_run_summary.json')
-        with open(path, 'w') as f:
+        path = os.path.join(self.log_path, "test_run_summary.json")
+        with open(path, "w") as f:
             f.write(self.results.json_str())
         # New YAML format
-        self.summary_writer.dump(self.results.summary_dict(),
-                                 records.TestSummaryEntryType.SUMMARY)
+        self.summary_writer.dump(
+            self.results.summary_dict(), records.TestSummaryEntryType.SUMMARY
+        )
 
     def dump_config(self):
         """Writes the test config to a JSON file under self.log_path"""
-        config_path = os.path.join(self.log_path, 'test_configs.json')
-        with open(config_path, 'a') as f:
-            json.dump(dict(
-                itertools.chain(
-                    self.test_run_config.user_params.items(),
-                    self.test_run_config.controller_configs.items())),
-                      f,
-                      skipkeys=True,
-                      indent=4)
+        config_path = os.path.join(self.log_path, "test_configs.json")
+        with open(config_path, "a") as f:
+            json.dump(
+                dict(
+                    itertools.chain(
+                        self.test_run_config.user_params.items(),
+                        self.test_run_config.controller_configs.items(),
+                    )
+                ),
+                f,
+                skipkeys=True,
+                indent=4,
+            )
 
     def write_test_campaign(self):
         """Log test campaign file."""
-        path = os.path.join(self.log_path, 'test_campaign.log')
-        with open(path, 'w') as f:
+        path = os.path.join(self.log_path, "test_campaign.log")
+        with open(path, "w") as f:
             for test_class, test_cases in self.run_list:
-                f.write('%s:\n%s' % (test_class, ',\n'.join(test_cases)))
-                f.write('\n\n')
+                f.write("%s:\n%s" % (test_class, ",\n".join(test_cases)))
+                f.write("\n\n")
diff --git a/src/antlion/test_utils/abstract_devices/bluetooth_device.py b/src/antlion/test_utils/abstract_devices/bluetooth_device.py
deleted file mode 100644
index c0cb29e..0000000
--- a/src/antlion/test_utils/abstract_devices/bluetooth_device.py
+++ /dev/null
@@ -1,1468 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import inspect
-import logging
-
-from queue import Empty
-
-from antlion.controllers.android_device import AndroidDevice
-from antlion.controllers.fuchsia_device import FuchsiaDevice
-from antlion.test_utils.bt.bt_constants import ble_scan_settings_modes
-from antlion.test_utils.bt.bt_constants import gatt_cb_strings
-from antlion.test_utils.bt.bt_constants import gatt_event
-from antlion.test_utils.bt.bt_constants import scan_result
-from antlion.test_utils.bt.bt_gatt_utils import GattTestUtilsError
-from antlion.test_utils.bt.bt_gatt_utils import disconnect_gatt_connection
-from antlion.test_utils.bt.bt_gatt_utils import setup_gatt_connection
-from antlion.test_utils.fuchsia.bt_test_utils import le_scan_for_device_by_name
-
-import antlion.test_utils.bt.bt_test_utils as bt_test_utils
-
-
-def create_bluetooth_device(hardware_device):
-    """Creates a generic Bluetooth device based on type of device that is sent
-    to the functions.
-
-    Args:
-        hardware_device: A Bluetooth hardware device that is supported by ACTS.
-    """
-    if isinstance(hardware_device, FuchsiaDevice):
-        return FuchsiaBluetoothDevice(hardware_device)
-    elif isinstance(hardware_device, AndroidDevice):
-        return AndroidBluetoothDevice(hardware_device)
-    else:
-        raise ValueError('Unable to create BluetoothDevice for type %s' %
-                         type(hardware_device))
-
-
-class BluetoothDevice(object):
-    """Class representing a generic Bluetooth device.
-
-    Each object of this class represents a generic Bluetooth device.
-    Android device and Fuchsia devices are the currently supported devices.
-
-    Attributes:
-        device: A generic Bluetooth device.
-    """
-
-    def __init__(self, device):
-        self.device = device
-        self.log = logging
-
-    def a2dp_initiate_open_stream(self):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def start_profile_a2dp_sink(self):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def stop_profile_a2dp_sink(self):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def start_pairing_helper(self):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def set_discoverable(self, is_discoverable):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def bluetooth_toggle_state(self, state):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def gatt_client_discover_characteristic_by_uuid(self, peer_identifier,
-                                                    uuid):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def initialize_bluetooth_controller(self):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def get_pairing_pin(self):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def input_pairing_pin(self, pin):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def get_bluetooth_local_address(self):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def gatt_connect(self, peer_identifier, transport, autoconnect):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def gatt_client_write_characteristic_without_response_by_handle(
-            self, peer_identifier, handle, value):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def gatt_client_write_characteristic_by_handle(self, peer_identifier,
-                                                   handle, offset, value):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def gatt_client_read_characteristic_by_handle(self, peer_identifier,
-                                                  handle):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def gatt_client_read_characteristic_by_uuid(self, peer_identifier, uuid):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def gatt_client_read_long_characteristic_by_handle(self, peer_identifier,
-                                                       handle, offset,
-                                                       max_bytes):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def gatt_client_enable_notifiy_characteristic_by_handle(
-            self, peer_identifier, handle):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def gatt_client_disable_notifiy_characteristic_by_handle(
-            self, peer_identifier, handle):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def gatt_client_read_descriptor_by_handle(self, peer_identifier, handle):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def gatt_client_write_descriptor_by_handle(self, peer_identifier, handle,
-                                               offset, value):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def gatt_client_long_read_descriptor_by_handle(self, peer_identifier,
-                                                   handle, offset, max_bytes):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def gatt_disconnect(self, peer_identifier):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def gatt_client_refresh(self, peer_identifier):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def le_scan_with_name_filter(self, name, timeout):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def log_info(self, log):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def reset_bluetooth(self):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def sdp_add_search(self, attribute_list, profile_id):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def sdp_add_service(self, sdp_record):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def sdp_clean_up(self):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def sdp_init(self):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def sdp_remove_service(self, service_id):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def start_le_advertisement(self, adv_data, scan_response, adv_interval,
-                               connectable):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def stop_le_advertisement(self):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def set_bluetooth_local_name(self, name):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def setup_gatt_server(self, database):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def close_gatt_server(self):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def unbond_device(self, peer_identifier):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def unbond_all_known_devices(self):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def init_pair(self, peer_identifier, security_level, non_bondable,
-                  transport):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-
class AndroidBluetoothDevice(BluetoothDevice):
    """Class wrapper for an Android Bluetooth device.

    Each object of this class represents a generic Bluetooth device.
    Android devices and Fuchsia devices are the currently supported devices.

    Attributes:
        device: The underlying Android device object (set by the base class).
        gatt_timeout: Seconds to wait for any GATT callback event.
        peer_mapping: Maps a peer identifier (mac address) to the
            bluetooth_gatt / gatt_callback ids of its open GATT connection.
        discovered_services_index: Cached SL4A services index from the most
            recent GATT service discovery, or None before discovery has run.
    """

    def __init__(self, android_device):
        super().__init__(android_device)
        self.gatt_timeout = 10
        self.peer_mapping = {}
        self.discovered_services_index = None

    def _client_wait(self, gatt_event, gatt_callback):
        """Waits for and returns the GATT event for the given callback id."""
        return self._timed_pop(gatt_event, gatt_callback)

    def _timed_pop(self, gatt_event, gatt_callback):
        """Pops the expected GATT event within self.gatt_timeout seconds.

        Raises:
            AssertionError: If the expected event does not arrive in time.
        """
        expected_event = gatt_event["evt"].format(gatt_callback)
        try:
            return self.device.ed.pop_event(expected_event, self.gatt_timeout)
        except Empty:
            raise AssertionError(gatt_event["err"].format(expected_event))

    def _setup_discovered_services_index(self, bluetooth_gatt,
                                         gatt_callback=None):
        """ Sets the discovered services index for the gatt connection
        related to the Bluetooth GATT callback object.

        Args:
            bluetooth_gatt: The BluetoothGatt callback id
            gatt_callback: The gatt callback id of the same connection, used
                to build the expected service-discovery event name.
        """
        if not self.discovered_services_index:
            self.device.droid.gattClientDiscoverServices(bluetooth_gatt)
            # Bug fix: the original referenced self.gatt_callback and
            # self.dut, neither of which exists on this class.
            expected_event = gatt_cb_strings['gatt_serv_disc'].format(
                gatt_callback)
            event = self.device.ed.pop_event(expected_event,
                                             self.gatt_timeout)
            self.discovered_services_index = event['data']['ServicesIndex']

    def _connected_peer_info(self, peer_identifier):
        """Returns the stored GATT connection info for a peer, or None.

        Logs an error when the peer has no tracked connection.
        """
        peer_info = self.peer_mapping.get(peer_identifier)
        if not peer_info:
            self.log.error(
                "Peer identifier {} not currently connected or unknown.".
                format(peer_identifier))
        return peer_info

    def a2dp_initiate_open_stream(self):
        raise NotImplementedError("{} not yet implemented.".format(
            inspect.currentframe().f_code.co_name))

    def start_profile_a2dp_sink(self):
        raise NotImplementedError("{} not yet implemented.".format(
            inspect.currentframe().f_code.co_name))

    def stop_profile_a2dp_sink(self):
        raise NotImplementedError("{} not yet implemented.".format(
            inspect.currentframe().f_code.co_name))

    def bluetooth_toggle_state(self, state):
        """Turns the Bluetooth adapter on (state=True) or off (state=False)."""
        self.device.droid.bluetoothToggleState(state)

    def set_discoverable(self, is_discoverable):
        """ Sets the device's discoverability.

        Args:
            is_discoverable: True if discoverable, false if not discoverable
        """
        if is_discoverable:
            self.device.droid.bluetoothMakeDiscoverable()
        else:
            self.device.droid.bluetoothMakeUndiscoverable()

    def initialize_bluetooth_controller(self):
        """ Just pass for Android as there is no concept of initializing
        a Bluetooth controller.
        """

    def start_pairing_helper(self):
        """ Starts the Android pairing helper.
        """
        self.device.droid.bluetoothStartPairingHelper(True)

    def gatt_client_write_characteristic_without_response_by_handle(
            self, peer_identifier, handle, value):
        """ Perform a GATT Client write Characteristic without response to
        remote peer GATT server database.

        Args:
            peer_identifier: The mac address associated with the GATT connection
            handle: The characteristic handle (or instance id).
            value: The list of bytes to write.
        Returns:
            True if success, False if failure.
        """
        peer_info = self._connected_peer_info(peer_identifier)
        if not peer_info:
            return False
        # Bug fix: the original called this helper with no arguments.
        self._setup_discovered_services_index(
            peer_info.get('bluetooth_gatt'), peer_info.get('gatt_callback'))
        self.device.droid.gattClientWriteCharacteristicByInstanceId(
            peer_info.get('bluetooth_gatt'), self.discovered_services_index,
            handle, value)
        try:
            self._client_wait(gatt_event['char_write'],
                              peer_info.get('gatt_callback'))
        except AssertionError as err:
            self.log.error("Failed to write Characteristic: {}".format(err))
            # Bug fix: report failure instead of unconditionally True.
            return False
        return True

    def gatt_client_write_characteristic_by_handle(self, peer_identifier,
                                                   handle, offset, value):
        """ Perform a GATT Client write Characteristic to the remote peer GATT
        server database.

        Args:
            peer_identifier: The mac address associated with the GATT connection
            handle: The characteristic handle (or instance id).
            offset: Not used yet.
            value: The list of bytes to write.
        Returns:
            True if success, False if failure.
        """
        peer_info = self._connected_peer_info(peer_identifier)
        if not peer_info:
            return False
        # Bug fix: the original called this helper with no arguments.
        self._setup_discovered_services_index(
            peer_info.get('bluetooth_gatt'), peer_info.get('gatt_callback'))
        self.device.droid.gattClientWriteCharacteristicByInstanceId(
            peer_info.get('bluetooth_gatt'), self.discovered_services_index,
            handle, value)
        try:
            self._client_wait(gatt_event['char_write'],
                              peer_info.get('gatt_callback'))
        except AssertionError as err:
            self.log.error("Failed to write Characteristic: {}".format(err))
            # Bug fix: report failure instead of unconditionally True.
            return False
        return True

    def gatt_client_read_characteristic_by_handle(self, peer_identifier,
                                                  handle):
        """ Perform a GATT Client read Characteristic to remote peer GATT
        server database.

        Args:
            peer_identifier: The mac address associated with the GATT connection
            handle: The characteristic handle (or instance id).
        Returns:
            Value of Characteristic if success, None if failure.
        """
        peer_info = self._connected_peer_info(peer_identifier)
        if not peer_info:
            return None
        self._setup_discovered_services_index(
            peer_info.get('bluetooth_gatt'), peer_info.get('gatt_callback'))
        # Bug fix: the original used self.dut, which does not exist here.
        self.device.droid.gattClientReadCharacteristicByInstanceId(
            peer_info.get('bluetooth_gatt'), self.discovered_services_index,
            handle)
        try:
            event = self._client_wait(gatt_event['char_read'],
                                      peer_info.get('gatt_callback'))
        except AssertionError as err:
            self.log.error("Failed to read Characteristic: {}".format(err))
            # Bug fix: the original fell through here and referenced an
            # unbound `event`, raising NameError on timeout.
            return None
        return event['data']['CharacteristicValue']

    def gatt_client_read_long_characteristic_by_handle(self, peer_identifier,
                                                       handle, offset,
                                                       max_bytes):
        """ Perform a GATT Client read Characteristic to remote peer GATT
        server database.

        Args:
            peer_identifier: The mac address associated with the GATT connection
            offset: Not used yet.
            handle: The characteristic handle (or instance id).
            max_bytes: Not used yet.
        Returns:
            Value of Characteristic if success, None if failure.
        """
        peer_info = self._connected_peer_info(peer_identifier)
        if not peer_info:
            return None
        self._setup_discovered_services_index(
            peer_info.get('bluetooth_gatt'), peer_info.get('gatt_callback'))
        # Bug fix: the original used self.dut, which does not exist here.
        self.device.droid.gattClientReadCharacteristicByInstanceId(
            peer_info.get('bluetooth_gatt'), self.discovered_services_index,
            handle)
        try:
            event = self._client_wait(gatt_event['char_read'],
                                      peer_info.get('gatt_callback'))
        except AssertionError as err:
            self.log.error("Failed to read Characteristic: {}".format(err))
            # Bug fix: avoid NameError on the unbound `event` below.
            return None
        return event['data']['CharacteristicValue']

    def gatt_client_enable_notifiy_characteristic_by_handle(
            self, peer_identifier, handle):
        """ Perform a GATT Client enable Characteristic notification to remote
        peer GATT server database.

        Args:
            peer_identifier: The mac address associated with the GATT connection
            handle: The characteristic handle.
        Returns:
            True is success, False if failure.
        """
        raise NotImplementedError("{} not yet implemented.".format(
            inspect.currentframe().f_code.co_name))

    def gatt_client_disable_notifiy_characteristic_by_handle(
            self, peer_identifier, handle):
        """ Perform a GATT Client disable Characteristic notification to remote
        peer GATT server database.

        Args:
            peer_identifier: The mac address associated with the GATT connection
            handle: The characteristic handle.
        Returns:
            True is success, False if failure.
        """
        raise NotImplementedError("{} not yet implemented.".format(
            inspect.currentframe().f_code.co_name))

    def gatt_client_read_descriptor_by_handle(self, peer_identifier, handle):
        """ Perform a GATT Client read Descriptor to remote peer GATT
        server database.

        Args:
            peer_identifier: The mac address associated with the GATT connection
            handle: The Descriptor handle (or instance id).
        Returns:
            Value of Descriptor if success, None if failure.
        """
        peer_info = self._connected_peer_info(peer_identifier)
        if not peer_info:
            return None
        self._setup_discovered_services_index(
            peer_info.get('bluetooth_gatt'), peer_info.get('gatt_callback'))
        # Bug fix: the original used self.dut, which does not exist here.
        self.device.droid.gattClientReadDescriptorByInstanceId(
            peer_info.get('bluetooth_gatt'), self.discovered_services_index,
            handle)
        try:
            self._client_wait(gatt_event['desc_read'],
                              peer_info.get('gatt_callback'))
        except AssertionError as err:
            self.log.error("Failed to read Descriptor: {}".format(err))
            return None
        # TODO: Implement sending Descriptor value in SL4A such that the data
        # can be represented by: event['data']['DescriptorValue']
        return ""

    def gatt_client_write_descriptor_by_handle(self, peer_identifier, handle,
                                               offset, value):
        """ Perform a GATT Client write Descriptor to the remote peer GATT
        server database.

        Args:
            peer_identifier: The mac address associated with the GATT connection
            handle: The Descriptor handle (or instance id).
            offset: Not used yet
            value: The list of bytes to write.
        Returns:
            True if success, False if failure.
        """
        peer_info = self._connected_peer_info(peer_identifier)
        if not peer_info:
            return False
        self._setup_discovered_services_index(
            peer_info.get('bluetooth_gatt'), peer_info.get('gatt_callback'))
        self.device.droid.gattClientWriteDescriptorByInstanceId(
            peer_info.get('bluetooth_gatt'), self.discovered_services_index,
            handle, value)
        try:
            self._client_wait(gatt_event['desc_write'],
                              peer_info.get('gatt_callback'))
        except AssertionError as err:
            self.log.error("Failed to write Descriptor: {}".format(err))
            # Bug fix: report failure instead of unconditionally True.
            return False
        return True

    def gatt_connect(self, peer_identifier, transport, autoconnect=False):
        """ Perform a GATT connection to a peripheral.

        Args:
            peer_identifier: The mac address to connect to.
            transport: Which transport to use.
            autoconnect: Set autoconnect to True or False.
        Returns:
            True if success, False if failure.
        """
        try:
            bluetooth_gatt, gatt_callback = setup_gatt_connection(
                self.device, peer_identifier, autoconnect, transport)
            self.peer_mapping[peer_identifier] = {
                "bluetooth_gatt": bluetooth_gatt,
                "gatt_callback": gatt_callback
            }
        except GattTestUtilsError as err:
            self.log.error(err)
            return False
        return True

    def gatt_disconnect(self, peer_identifier):
        """ Perform a GATT disconnect from a peripheral.

        Args:
            peer_identifier: The peer to disconnect from.
        Returns:
            True if success, False if failure.
        """
        peer_info = self.peer_mapping.get(peer_identifier)
        if not peer_info:
            self.log.error(
                "No previous connections made to {}".format(peer_identifier))
            return False

        try:
            disconnect_gatt_connection(self.device,
                                       peer_info.get("bluetooth_gatt"),
                                       peer_info.get("gatt_callback"))
        except GattTestUtilsError as err:
            self.log.error(err)
            return False
        # Bug fix: close the GATT client exactly once (the original closed it
        # twice on success) and return True as documented.
        self.device.droid.gattClientClose(peer_info.get("bluetooth_gatt"))
        return True

    def gatt_client_refresh(self, peer_identifier):
        """ Perform a GATT Client Refresh of a peripheral.

        Clears the internal cache and forces a refresh of the services from the
        remote device.

        Args:
            peer_identifier: The peer to refresh.
        """
        peer_info = self.peer_mapping.get(peer_identifier)
        if not peer_info:
            self.log.error(
                "No previous connections made to {}".format(peer_identifier))
            return False
        self.device.droid.gattClientRefresh(peer_info["bluetooth_gatt"])

    def le_scan_with_name_filter(self, name, timeout):
        """ Scan over LE for a specific device name.

         Args:
            name: The name filter to set.
            timeout: The timeout to wait to find the advertisement.
        Returns:
            Discovered mac address or None
        """
        self.device.droid.bleSetScanSettingsScanMode(
            ble_scan_settings_modes['low_latency'])
        filter_list = self.device.droid.bleGenFilterList()
        scan_settings = self.device.droid.bleBuildScanSetting()
        scan_callback = self.device.droid.bleGenScanCallback()
        self.device.droid.bleSetScanFilterDeviceName(name)
        self.device.droid.bleBuildScanFilter(filter_list)
        # Bug fix: dropped a stray second bleSetScanFilterDeviceName call that
        # used the nonexistent self.name after the filter was already built.
        self.device.droid.bleStartBleScan(filter_list, scan_settings,
                                          scan_callback)
        try:
            event = self.device.ed.pop_event(scan_result.format(scan_callback),
                                             timeout)
            return event['data']['Result']['deviceInfo']['address']
        except Empty as err:
            self.log.info("Scanner did not find advertisement {}".format(err))
            return None

    def log_info(self, log):
        """ Log directly onto the device.

        Args:
            log: The informative log.
        """
        self.device.droid.log.logI(log)

    def set_bluetooth_local_name(self, name):
        """ Sets the Bluetooth controller's local name
        Args:
            name: The name to set.
        """
        self.device.droid.bluetoothSetLocalName(name)

    def get_local_bluetooth_address(self):
        """ Returns the Bluetooth local address.
        """
        return self.device.droid.bluetoothGetLocalAddress()

    def reset_bluetooth(self):
        """ Resets Bluetooth on the Android Device.
        """
        bt_test_utils.reset_bluetooth([self.device])

    def sdp_add_search(self, attribute_list, profile_id):
        """Adds an SDP search record.
        Args:
            attribute_list: The list of attributes to set
            profile_id: The profile ID to set.
        """
        # Android devices currently have no hooks to modify the SDP record.

    def sdp_add_service(self, sdp_record):
        """Adds an SDP service record.
        Args:
            sdp_record: The dictionary representing the search record to add.
        Returns:
            service_id: The service id to track the service record published.
                None if failed.
        """
        # Android devices currently have no hooks to modify the SDP record.

    def sdp_clean_up(self):
        """Cleans up all objects related to SDP.
        """
        # Bug fix: the original called self.device.sl4f.sdp_lib.cleanUp(),
        # but sl4f is a Fuchsia-only attribute. Android devices currently
        # have no hooks to modify the SDP record, so this is a no-op.

    def sdp_init(self):
        """Initializes SDP on the device.
        """
        # Android devices currently have no hooks to modify the SDP record.

    def sdp_remove_service(self, service_id):
        """Removes a service based on an input id.
        Args:
            service_id: The service ID to remove.
        """
        # Android devices currently have no hooks to modify the SDP record.

    def unbond_all_known_devices(self):
        """ Unbond all known remote devices.
        """
        self.device.droid.bluetoothFactoryReset()

    def unbond_device(self, peer_identifier):
        """ Unbond peer identifier.

        Args:
            peer_identifier: The mac address for the peer to unbond.

        """
        self.device.droid.bluetoothUnbond(peer_identifier)

    def init_pair(self, peer_identifier, security_level, non_bondable,
                  transport):
        """ Send an outgoing pairing request the input peer_identifier.

        Android currently does not support setting various security levels or
        bondable modes. Making them available for other bluetooth_device
        variants. Depending on the Address type, Android will figure out the
        transport to pair automatically.

        Args:
            peer_identifier: A string representing the device id.
            security_level: Not yet implemented. See Fuchsia device impl.
            non_bondable: Not yet implemented. See Fuchsia device impl.
            transport: Not yet implemented. See Fuchsia device impl.

        """
        # Bug fix: the original used self.dut and self.peer_identifier,
        # neither of which exists on this class.
        self.device.droid.bluetoothBond(peer_identifier)
-
-
-class FuchsiaBluetoothDevice(BluetoothDevice):
-    """Class wrapper for an Fuchsia Bluetooth device.
-
-    Each object of this class represents a generic Bluetooth device.
-    Android devices and Fuchsia devices are the currently supported devices.
-
-    Attributes:
-        fuchsia_device: A Fuchsia Bluetooth device.
-    """
-
    def __init__(self, fuchsia_device):
        # Delegate all device bookkeeping to the generic BluetoothDevice base;
        # no Fuchsia-specific state is needed at construction time.
        super().__init__(fuchsia_device)
-
-    def a2dp_initiate_open_stream(self):
-        raise NotImplementedError("{} not yet implemented.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def start_profile_a2dp_sink(self):
-        """ Starts the A2DP sink profile.
-        """
-        self.device.start_v1_component("bt-a2dp-sink")
-
-    def stop_profile_a2dp_sink(self):
-        """ Stops the A2DP sink profile.
-        """
-        self.device.stop_v1_component("bt-a2dp-sink")
-
-    def start_pairing_helper(self):
-        self.device.sl4f.bts_lib.acceptPairing()
-
    def bluetooth_toggle_state(self, state):
        """Stub for Fuchsia implementation."""
        # Intentionally a no-op: no adapter on/off toggle is exposed for
        # Fuchsia here; the generic interface still requires the method.
-
-    def set_discoverable(self, is_discoverable):
-        """ Sets the device's discoverability.
-
-        Args:
-            is_discoverable: True if discoverable, false if not discoverable
-        """
-        self.device.sl4f.bts_lib.setDiscoverable(is_discoverable)
-
-    def get_pairing_pin(self):
-        """ Get the pairing pin from the active pairing delegate.
-        """
-        return self.device.sl4f.bts_lib.getPairingPin()['result']
-
-    def input_pairing_pin(self, pin):
-        """ Input pairing pin to active pairing delegate.
-
-        Args:
-            pin: The pin to input.
-        """
-        self.device.sl4f.bts_lib.inputPairingPin(pin)
-
-    def initialize_bluetooth_controller(self):
-        """ Initialize Bluetooth controller for first time use.
-        """
-        self.device.sl4f.bts_lib.initBluetoothSys()
-
-    def get_local_bluetooth_address(self):
-        """ Returns the Bluetooth local address.
-        """
-        return self.device.sl4f.bts_lib.getActiveAdapterAddress().get("result")
-
-    def set_bluetooth_local_name(self, name):
-        """ Sets the Bluetooth controller's local name
-        Args:
-            name: The name to set.
-        """
-        self.device.sl4f.bts_lib.setName(name)
-
-    def gatt_client_write_characteristic_without_response_by_handle(
-            self, peer_identifier, handle, value):
-        """ Perform a GATT Client write Characteristic without response to
-        remote peer GATT server database.
-
-        Args:
-            peer_identifier: The peer to connect to.
-            handle: The characteristic handle.
-            value: The list of bytes to write.
-        Returns:
-            True if success, False if failure.
-        """
-        if (not self._find_service_id_and_connect_to_service_for_handle(
-                peer_identifier, handle)):
-            self.log.warn(
-                "Unable to find handle {} in GATT server db.".format(handle))
-        result = self.device.sl4f.gattc_lib.writeCharByIdWithoutResponse(
-            handle, value)
-        if result.get("error") is not None:
-            self.log.error(
-                "Failed to write characteristic handle {} with err: {}".format(
-                    handle, result.get("error")))
-            return False
-        return True
-
-    def gatt_client_write_characteristic_by_handle(self, peer_identifier,
-                                                   handle, offset, value):
-        """ Perform a GATT Client write Characteristic to remote peer GATT
-        server database.
-
-        Args:
-            peer_identifier: The peer to connect to.
-            handle: The characteristic handle.
-            offset: The offset to start writing to.
-            value: The list of bytes to write.
-        Returns:
-            True if success, False if failure.
-        """
-        if (not self._find_service_id_and_connect_to_service_for_handle(
-                peer_identifier, handle)):
-            self.log.warn(
-                "Unable to find handle {} in GATT server db.".format(handle))
-        result = self.device.sl4f.gattc_lib.writeCharById(
-            handle, offset, value)
-        if result.get("error") is not None:
-            self.log.error(
-                "Failed to write characteristic handle {} with err: {}".format(
-                    handle, result.get("error")))
-            return False
-        return True
-
-    def gatt_client_write_long_characteristic_by_handle(
-            self, peer_identifier, handle, offset, value, reliable_mode=False):
-        """ Perform a GATT Client write long Characteristic to remote peer GATT
-        server database.
-
-        Args:
-            peer_identifier: The peer to connect to.
-            handle: The characteristic handle.
-            offset: The offset to start writing to.
-            value: The list of bytes to write.
-            reliable_mode: A bool value representing a reliable write or not.
-        Returns:
-            True if success, False if failure.
-        """
-        if (not self._find_service_id_and_connect_to_service_for_handle(
-                peer_identifier, handle)):
-            self.log.error(
-                "Unable to find handle {} in GATT server db.".format(handle))
-            return False
-        result = self.device.sl4f.gattc_lib.writeLongCharById(
-            handle, offset, value, reliable_mode)
-        if result.get("error") is not None:
-            self.log.error(
-                "Failed to write long characteristic handle {} with err: {}".
-                format(peer_identifier, result.get("error")))
-            return False
-        return True
-
-    def gatt_client_write_long_descriptor_by_handle(self, peer_identifier,
-                                                    handle, offset, value):
-        """ Perform a GATT Client write long Descriptor to remote peer GATT
-        server database.
-
-        Args:
-            peer_identifier: The peer to connect to.
-            handle: The descriptor handle.
-            offset: The offset to start writing to.
-            value: The list of bytes to write.
-        Returns:
-            True if success, False if failure.
-        """
-        if (not self._find_service_id_and_connect_to_service_for_handle(
-                peer_identifier, handle)):
-            self.log.error(
-                "Unable to find handle {} in GATT server db.".format(handle))
-            return False
-        result = self.device.sl4f.gattc_lib.writeLongDescById(
-            handle, offset, value)
-        if result.get("error") is not None:
-            self.log.error(
-                "Failed to write long descriptor handle {} with err: {}".
-                format(peer_identifier, result.get("error")))
-            return False
-        return True
-
-    def gatt_client_read_characteristic_by_handle(self, peer_identifier,
-                                                  handle):
-        """ Perform a GATT Client read Characteristic to remote peer GATT
-        server database.
-
-        Args:
-            peer_identifier: The peer to connect to.
-            handle: The characteristic handle.
-        Returns:
-            Value of Characteristic if success, None if failure.
-        """
-        if (not self._find_service_id_and_connect_to_service_for_handle(
-                peer_identifier, handle)):
-            self.log.warn(
-                "Unable to find handle {} in GATT server db.".format(handle))
-        result = self.device.sl4f.gattc_lib.readCharacteristicById(handle)
-        if result.get("error") is not None:
-            self.log.error(
-                "Failed to read characteristic handle {} with err: {}".format(
-                    handle, result.get("error")))
-            return None
-        return result.get("result")
-
-    def gatt_client_read_characteristic_by_uuid(self, peer_identifier, uuid):
-        """ Perform a GATT Client read Characteristic by uuid to remote peer GATT
-        server database.
-
-        Args:
-            peer_identifier: The peer to connect to.
-            uuid: The characteristic uuid.
-        Returns:
-            Value of Characteristic if success, None if failure.
-        """
-        if (not self._find_service_id_and_connect_to_service_for_handle(
-                peer_identifier, uuid, uuid=True)):
-            self.log.warn(
-                "Unable to find uuid {} in GATT server db.".format(uuid))
-        result = self.device.sl4f.gattc_lib.readCharacteristicByType(uuid)
-        if result.get("error") is not None:
-            self.log.error(
-                "Failed to read characteristic uuid {} with err: {}".format(
-                    uuid, result.get("error")))
-            return None
-        return result.get("result")
-
-    def gatt_client_read_long_characteristic_by_handle(self, peer_identifier,
-                                                       handle, offset,
-                                                       max_bytes):
-        """ Perform a GATT Client read Characteristic to remote peer GATT
-        server database.
-
-        Args:
-            peer_identifier: The peer to connect to.
-            handle: The characteristic handle.
-            offset: The offset to start reading.
-            max_bytes: The max bytes to return for each read.
-        Returns:
-            Value of Characteristic if success, None if failure.
-        """
-        if (not self._find_service_id_and_connect_to_service_for_handle(
-                peer_identifier, handle)):
-            self.log.warn(
-                "Unable to find handle {} in GATT server db.".format(handle))
-        result = self.device.sl4f.gattc_lib.readLongCharacteristicById(
-            handle, offset, max_bytes)
-        if result.get("error") is not None:
-            self.log.error(
-                "Failed to read characteristic handle {} with err: {}".format(
-                    handle, result.get("error")))
-            return None
-        return result.get("result")
-
-    def gatt_client_enable_notifiy_characteristic_by_handle(
-            self, peer_identifier, handle):
-        """ Perform a GATT Client enable Characteristic notification to remote
-        peer GATT server database.
-
-        Args:
-            peer_identifier: The peer to connect to.
-            handle: The characteristic handle.
-        Returns:
-            True is success, False if failure.
-        """
-        if (not self._find_service_id_and_connect_to_service_for_handle(
-                peer_identifier, handle)):
-            self.log.warn(
-                "Unable to find handle {} in GATT server db.".format(handle))
-        result = self.device.sl4f.gattc_lib.enableNotifyCharacteristic(handle)
-        if result.get("error") is not None:
-            self.log.error(
-                "Failed to enable characteristic notifications for handle {} "
-                "with err: {}".format(handle, result.get("error")))
-            return None
-        return result.get("result")
-
-    def gatt_client_disable_notifiy_characteristic_by_handle(
-            self, peer_identifier, handle):
-        """ Perform a GATT Client disable Characteristic notification to remote
-        peer GATT server database.
-
-        Args:
-            peer_identifier: The peer to connect to.
-            handle: The characteristic handle.
-        Returns:
-            True is success, False if failure.
-        """
-        if (not self._find_service_id_and_connect_to_service_for_handle(
-                peer_identifier, handle)):
-            self.log.warn(
-                "Unable to find handle {} in GATT server db.".format(handle))
-        result = self.device.sl4f.gattc_lib.disableNotifyCharacteristic(handle)
-        if result.get("error") is not None:
-            self.log.error(
-                "Failed to disable characteristic notifications for handle {} "
-                "with err: {}".format(peer_identifier, result.get("error")))
-            return None
-        return result.get("result")
-
-    def gatt_client_read_descriptor_by_handle(self, peer_identifier, handle):
-        """ Perform a GATT Client read Descriptor to remote peer GATT server
-        database.
-
-        Args:
-            peer_identifier: The peer to connect to.
-            handle: The Descriptor handle.
-        Returns:
-            Value of Descriptor if success, None if failure.
-        """
-        if (not self._find_service_id_and_connect_to_service_for_handle(
-                peer_identifier, handle)):
-            self.log.warn(
-                "Unable to find handle {} in GATT server db.".format(handle))
-        result = self.device.sl4f.gattc_lib.readDescriptorById(handle)
-        if result.get("error") is not None:
-            self.log.error(
-                "Failed to read descriptor for handle {} with err: {}".format(
-                    peer_identifier, result.get("error")))
-            return None
-        return result.get("result")
-
-    def gatt_client_write_descriptor_by_handle(self, peer_identifier, handle,
-                                               offset, value):
-        """ Perform a GATT Client write Descriptor to remote peer GATT server
-        database.
-
-        Args:
-            peer_identifier: The peer to connect to.
-            handle: The Descriptor handle.
-            offset: The offset to start writing at.
-            value: The list of bytes to write.
-        Returns:
-            True if success, False if failure.
-        """
-        if (not self._find_service_id_and_connect_to_service_for_handle(
-                peer_identifier, handle)):
-            self.log.warn(
-                "Unable to find handle {} in GATT server db.".format(handle))
-        result = self.device.sl4f.gattc_lib.writeDescriptorById(
-            handle, offset, value)
-        if result.get("error") is not None:
-            self.log.error(
-                "Failed to write descriptor for handle {} with err: {}".format(
-                    peer_identifier, result.get("error")))
-            return None
-        return True
-
-    def gatt_connect(self, peer_identifier, transport, autoconnect):
-        """ Perform a GATT connection to a perihperal.
-
-        Args:
-            peer_identifier: The peer to connect to.
-            transport: Not implemented.
-            autoconnect: Not implemented.
-        Returns:
-            True if success, False if failure.
-        """
-        connection_result = self.device.sl4f.gattc_lib.bleConnectToPeripheral(
-            peer_identifier)
-        if connection_result.get("error") is not None:
-            self.log.error("Failed to connect to peer id {}: {}".format(
-                peer_identifier, connection_result.get("error")))
-            return False
-        return True
-
-    def gatt_client_refresh(self, peer_identifier):
-        """ Perform a GATT Client Refresh of a perihperal.
-
-        Clears the internal cache and forces a refresh of the services from the
-        remote device. In Fuchsia there is no FIDL api to automatically do this
-        yet. Therefore just read all Characteristics which satisfies the same
-        requirements.
-
-        Args:
-            peer_identifier: The peer to refresh.
-        """
-        self._read_all_characteristics(peer_identifier)
-
-    def gatt_client_discover_characteristic_by_uuid(self, peer_identifier,
-                                                    uuid):
-        """ Perform a GATT Client Refresh of a perihperal.
-
-        Clears the internal cache and forces a refresh of the services from the
-        remote device. In Fuchsia there is no FIDL api to automatically do this
-        yet. Therefore just read all Characteristics which satisfies the same
-        requirements.
-
-        Args:
-            peer_identifier: The peer to refresh.
-        """
-        self._read_all_characteristics(peer_identifier, uuid)
-
-    def gatt_disconnect(self, peer_identifier):
-        """ Perform a GATT disconnect from a perihperal.
-
-        Args:
-            peer_identifier: The peer to disconnect from.
-        Returns:
-            True if success, False if failure.
-        """
-        disconnect_result = self.device.sl4f.gattc_lib.bleDisconnectPeripheral(
-            peer_identifier)
-        if disconnect_result.get("error") is not None:
-            self.log.error("Failed to disconnect from peer id {}: {}".format(
-                peer_identifier, disconnect_result.get("error")))
-            return False
-        return True
-
-    def reset_bluetooth(self):
-        """Stub for Fuchsia implementation."""
-
-    def sdp_add_search(self, attribute_list, profile_id):
-        """Adds an SDP search record.
-        Args:
-            attribute_list: The list of attributes to set
-            profile_id: The profile ID to set.
-        """
-        return self.device.sl4f.sdp_lib.addSearch(attribute_list, profile_id)
-
-    def sdp_add_service(self, sdp_record):
-        """Adds an SDP service record.
-        Args:
-            sdp_record: The dictionary representing the search record to add.
-        """
-        return self.device.sl4f.sdp_lib.addService(sdp_record)
-
-    def sdp_clean_up(self):
-        """Cleans up all objects related to SDP.
-        """
-        return self.device.sl4f.sdp_lib.cleanUp()
-
-    def sdp_init(self):
-        """Initializes SDP on the device.
-        """
-        return self.device.sl4f.sdp_lib.init()
-
-    def sdp_remove_service(self, service_id):
-        """Removes a service based on an input id.
-        Args:
-            service_id: The service ID to remove.
-        """
-        return self.device.sl4f.sdp_lib.init()
-
-    def start_le_advertisement(self, adv_data, scan_response, adv_interval,
-                               connectable):
-        """ Starts an LE advertisement
-
-        Args:
-            adv_data: Advertisement data.
-            adv_interval: Advertisement interval.
-        """
-        self.device.sl4f.ble_lib.bleStartBleAdvertising(
-            adv_data, scan_response, adv_interval, connectable)
-
-    def stop_le_advertisement(self):
-        """ Stop active LE advertisement.
-        """
-        self.device.sl4f.ble_lib.bleStopBleAdvertising()
-
-    def setup_gatt_server(self, database):
-        """ Sets up an input GATT server.
-
-        Args:
-            database: A dictionary representing the GATT database to setup.
-        """
-        self.device.sl4f.gatts_lib.publishServer(database)
-
-    def close_gatt_server(self):
-        """ Closes an existing GATT server.
-        """
-        self.device.sl4f.gatts_lib.closeServer()
-
-    def le_scan_with_name_filter(self, name, timeout):
-        """ Scan over LE for a specific device name.
-
-        Args:
-            name: The name filter to set.
-            timeout: The timeout to wait to find the advertisement.
-        Returns:
-            Discovered device id or None
-        """
-        partial_match = True
-        return le_scan_for_device_by_name(self.device, self.device.log, name,
-                                          timeout, partial_match)
-
-    def log_info(self, log):
-        """ Log directly onto the device.
-
-        Args:
-            log: The informative log.
-        """
-        self.device.sl4f.logging_lib.logI(log)
-
-    def unbond_all_known_devices(self):
-        """ Unbond all known remote devices.
-        """
-        try:
-            device_list = self.device.sl4f.bts_lib.getKnownRemoteDevices(
-            )['result']
-            for device_info in device_list:
-                device = device_list[device_info]
-                if device['bonded']:
-                    self.device.sl4f.bts_lib.forgetDevice(device['id'])
-        except Exception as err:
-            self.log.err("Unable to unbond all devices: {}".format(err))
-
-    def unbond_device(self, peer_identifier):
-        """ Unbond peer identifier.
-
-        Args:
-            peer_identifier: The peer identifier for the peer to unbond.
-
-        """
-        self.device.sl4f.bts_lib.forgetDevice(peer_identifier)
-
-    def _find_service_id_and_connect_to_service_for_handle(
-            self, peer_identifier, handle, uuid=False):
-        fail_err = "Failed to find handle {} in Peer database."
-        if uuid:
-            handle = handle.lower()
-        try:
-            services = self.device.sl4f.gattc_lib.listServices(peer_identifier)
-            for service in services['result']:
-                service_id = service['id']
-                self.device.sl4f.gattc_lib.connectToService(
-                    peer_identifier, service_id)
-                chars = self.device.sl4f.gattc_lib.discoverCharacteristics()
-
-                for char in chars['result']:
-                    char_id = char['id']
-                    if uuid:
-                        char_id = char['uuid_type']
-                    if handle == char_id:
-                        return True
-                    descriptors = char['descriptors']
-                    for desc in descriptors:
-                        desc_id = desc["id"]
-                        if uuid:
-                            desc_id = desc['uuid_type']
-                        if handle == desc_id:
-                            return True
-        except Exception as err:
-            self.log.error(fail_err.format(err))
-            return False
-
-    def _read_all_characteristics(self, peer_identifier, uuid=None):
-        fail_err = "Failed to read all characteristics with: {}"
-        try:
-            services = self.device.sl4f.gattc_lib.listServices(peer_identifier)
-            for service in services['result']:
-                service_id = service['id']
-                service_uuid = service['uuid_type']
-                self.device.sl4f.gattc_lib.connectToService(
-                    peer_identifier, service_id)
-                chars = self.device.sl4f.gattc_lib.discoverCharacteristics()
-                self.log.info(
-                    "Reading chars in service uuid: {}".format(service_uuid))
-
-                for char in chars['result']:
-                    char_id = char['id']
-                    char_uuid = char['uuid_type']
-                    if uuid and uuid.lower() not in char_uuid.lower():
-                        continue
-                    try:
-                        read_val =  \
-                            self.device.sl4f.gattc_lib.readCharacteristicById(
-                                char_id)
-                        self.log.info(
-                            "\tCharacteristic uuid / Value: {} / {}".format(
-                                char_uuid, read_val['result']))
-                        str_value = ""
-                        for val in read_val['result']:
-                            str_value += chr(val)
-                        self.log.info("\t\tstr val: {}".format(str_value))
-                    except Exception as err:
-                        self.log.error(err)
-        except Exception as err:
-            self.log.error(fail_err.forma(err))
-
-    def _perform_read_all_descriptors(self, peer_identifier):
-        fail_err = "Failed to read all characteristics with: {}"
-        try:
-            services = self.device.sl4f.gattc_lib.listServices(peer_identifier)
-            for service in services['result']:
-                service_id = service['id']
-                service_uuid = service['uuid_type']
-                self.device.sl4f.gattc_lib.connectToService(
-                    peer_identifier, service_id)
-                chars = self.device.sl4f.gattc_lib.discoverCharacteristics()
-                self.log.info(
-                    "Reading descs in service uuid: {}".format(service_uuid))
-
-                for char in chars['result']:
-                    char_id = char['id']
-                    char_uuid = char['uuid_type']
-                    descriptors = char['descriptors']
-                    self.log.info(
-                        "\tReading descs in char uuid: {}".format(char_uuid))
-                    for desc in descriptors:
-                        desc_id = desc["id"]
-                        desc_uuid = desc["uuid_type"]
-                    try:
-                        read_val = self.device.sl4f.gattc_lib.readDescriptorById(
-                            desc_id)
-                        self.log.info(
-                            "\t\tDescriptor uuid / Value: {} / {}".format(
-                                desc_uuid, read_val['result']))
-                    except Exception as err:
-                        pass
-        except Exception as err:
-            self.log.error(fail_err.format(err))
-
-    def init_pair(self, peer_identifier, security_level, non_bondable,
-                  transport):
-        """ Send an outgoing pairing request the input peer_identifier.
-
-        Android currently does not support setting various security levels or
-        bondable modes. Making them available for other bluetooth_device
-        variants. Depending on the Address type, Android will figure out the
-        transport to pair automatically.
-
-        Args:
-            peer_identifier: A string representing the device id.
-            security_level: The security level required for this pairing request
-                represented as a u64. (Only for LE pairing)
-                Available Values
-                1 - ENCRYPTED: Encrypted without MITM protection
-                    (unauthenticated)
-                2 - AUTHENTICATED: Encrypted with MITM protection
-                    (authenticated)
-                None: No pairing security level.
-            non_bondable: A bool representing whether the pairing mode is
-                bondable or not. None is also accepted. False if bondable, True
-                if non-bondable
-            transport: A u64 representing the transport type.
-                Available Values
-                1 - BREDR: Classic BR/EDR transport
-                2 - LE: Bluetooth Low Energy Transport
-        Returns:
-            True if successful, False if failed.
-        """
-        try:
-            self.device.sl4f.bts_lib.pair(peer_identifier, security_level,
-                                          non_bondable, transport)
-            return True
-        except Exception as err:
-            fail_err = "Failed to pair to peer_identifier {} with: {}".format(
-                peer_identifier)
-            self.log.error(fail_err.format(err))
diff --git a/src/antlion/test_utils/abstract_devices/bluetooth_handsfree_abstract_device.py b/src/antlion/test_utils/abstract_devices/bluetooth_handsfree_abstract_device.py
deleted file mode 100644
index 2367b94..0000000
--- a/src/antlion/test_utils/abstract_devices/bluetooth_handsfree_abstract_device.py
+++ /dev/null
@@ -1,340 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import inspect
-import time
-from antlion import asserts
-from antlion.controllers.buds_lib.dev_utils import apollo_sink_events
-from antlion.test_utils.bt.bt_constants import bt_default_timeout
-
-
-
-def validate_controller(controller, abstract_device_class):
-    """Ensure controller has all methods in abstract_device_class.
-    Also checks method signatures to ensure parameters are satisfied.
-
-    Args:
-        controller: instance of a device controller.
-        abstract_device_class: class definition of an abstract_device interface.
-    Raises:
-         NotImplementedError: if controller is missing one or more methods.
-    """
-    ctlr_methods = inspect.getmembers(controller, predicate=callable)
-    reqd_methods = inspect.getmembers(
-        abstract_device_class, predicate=inspect.ismethod)
-    expected_func_names = {method[0] for method in reqd_methods}
-    controller_func_names = {method[0] for method in ctlr_methods}
-
-    if not controller_func_names.issuperset(expected_func_names):
-        raise NotImplementedError(
-            'Controller {} is missing the following functions: {}'.format(
-                controller.__class__.__name__,
-                repr(expected_func_names - controller_func_names)))
-
-    for func_name in expected_func_names:
-        controller_func = getattr(controller, func_name)
-        required_func = getattr(abstract_device_class, func_name)
-        required_signature = inspect.signature(required_func)
-        if inspect.signature(controller_func) != required_signature:
-            raise NotImplementedError(
-                'Method {} must have the signature {}{}.'.format(
-                    controller_func.__qualname__, controller_func.__name__,
-                    required_signature))
-
-
-class BluetoothHandsfreeAbstractDevice:
-    """Base class for all Bluetooth handsfree abstract devices.
-
-    Desired controller classes should have a corresponding Bluetooth handsfree
-    abstract device class defined in this module.
-    """
-
-    @property
-    def mac_address(self):
-        raise NotImplementedError
-
-    def accept_call(self):
-        raise NotImplementedError()
-
-    def end_call(self):
-        raise NotImplementedError()
-
-    def enter_pairing_mode(self):
-        raise NotImplementedError()
-
-    def next_track(self):
-        raise NotImplementedError()
-
-    def pause(self):
-        raise NotImplementedError()
-
-    def play(self):
-        raise NotImplementedError()
-
-    def power_off(self):
-        raise NotImplementedError()
-
-    def power_on(self):
-        raise NotImplementedError()
-
-    def previous_track(self):
-        raise NotImplementedError()
-
-    def reject_call(self):
-        raise NotImplementedError()
-
-    def volume_down(self):
-        raise NotImplementedError()
-
-    def volume_up(self):
-        raise NotImplementedError()
-
-
-class PixelBudsBluetoothHandsfreeAbstractDevice(
-        BluetoothHandsfreeAbstractDevice):
-
-    CMD_EVENT = 'EvtHex'
-
-    def __init__(self, pixel_buds_controller):
-        self.pixel_buds_controller = pixel_buds_controller
-
-    def format_cmd(self, cmd_name):
-        return self.CMD_EVENT + ' ' + apollo_sink_events.SINK_EVENTS[cmd_name]
-
-    @property
-    def mac_address(self):
-        return self.pixel_buds_controller.bluetooth_address
-
-    def accept_call(self):
-        return self.pixel_buds_controller.cmd(
-            self.format_cmd('EventUsrAnswer'))
-
-    def end_call(self):
-        return self.pixel_buds_controller.cmd(
-            self.format_cmd('EventUsrCancelEnd'))
-
-    def enter_pairing_mode(self):
-        return self.pixel_buds_controller.set_pairing_mode()
-
-    def next_track(self):
-        return self.pixel_buds_controller.cmd(
-            self.format_cmd('EventUsrAvrcpSkipForward'))
-
-    def pause(self):
-        return self.pixel_buds_controller.cmd(
-            self.format_cmd('EventUsrAvrcpPause'))
-
-    def play(self):
-        return self.pixel_buds_controller.cmd(
-            self.format_cmd('EventUsrAvrcpPlay'))
-
-    def power_off(self):
-        return self.pixel_buds_controller.power('Off')
-
-    def power_on(self):
-        return self.pixel_buds_controller.power('On')
-
-    def previous_track(self):
-        return self.pixel_buds_controller.cmd(
-            self.format_cmd('EventUsrAvrcpSkipBackward'))
-
-    def reject_call(self):
-        return self.pixel_buds_controller.cmd(
-            self.format_cmd('EventUsrReject'))
-
-    def volume_down(self):
-        return self.pixel_buds_controller.volume('Down')
-
-    def volume_up(self):
-        return self.pixel_buds_controller.volume('Up')
-
-
-class EarstudioReceiverBluetoothHandsfreeAbstractDevice(
-        BluetoothHandsfreeAbstractDevice):
-    def __init__(self, earstudio_controller):
-        self.earstudio_controller = earstudio_controller
-
-    @property
-    def mac_address(self):
-        return self.earstudio_controller.mac_address
-
-    def accept_call(self):
-        return self.earstudio_controller.press_accept_call()
-
-    def end_call(self):
-        return self.earstudio_controller.press_end_call()
-
-    def enter_pairing_mode(self):
-        return self.earstudio_controller.enter_pairing_mode()
-
-    def next_track(self):
-        return self.earstudio_controller.press_next()
-
-    def pause(self):
-        return self.earstudio_controller.press_play_pause()
-
-    def play(self):
-        return self.earstudio_controller.press_play_pause()
-
-    def power_off(self):
-        return self.earstudio_controller.power_off()
-
-    def power_on(self):
-        return self.earstudio_controller.power_on()
-
-    def previous_track(self):
-        return self.earstudio_controller.press_previous()
-
-    def reject_call(self):
-        return self.earstudio_controller.press_reject_call()
-
-    def volume_down(self):
-        return self.earstudio_controller.press_volume_down()
-
-    def volume_up(self):
-        return self.earstudio_controller.press_volume_up()
-
-
-class JaybirdX3EarbudsBluetoothHandsfreeAbstractDevice(
-        BluetoothHandsfreeAbstractDevice):
-    def __init__(self, jaybird_controller):
-        self.jaybird_controller = jaybird_controller
-
-    @property
-    def mac_address(self):
-        return self.jaybird_controller.mac_address
-
-    def accept_call(self):
-        return self.jaybird_controller.press_accept_call()
-
-    def end_call(self):
-        return self.jaybird_controller.press_reject_call()
-
-    def enter_pairing_mode(self):
-        return self.jaybird_controller.enter_pairing_mode()
-
-    def next_track(self):
-        return self.jaybird_controller.press_next()
-
-    def pause(self):
-        return self.jaybird_controller.press_play_pause()
-
-    def play(self):
-        return self.jaybird_controller.press_play_pause()
-
-    def power_off(self):
-        return self.jaybird_controller.power_off()
-
-    def power_on(self):
-        return self.jaybird_controller.power_on()
-
-    def previous_track(self):
-        return self.jaybird_controller.press_previous()
-
-    def reject_call(self):
-        return self.jaybird_controller.press_reject_call()
-
-    def volume_down(self):
-        return self.jaybird_controller.press_volume_down()
-
-    def volume_up(self):
-        return self.jaybird_controller.press_volume_up()
-
-
-class AndroidHeadsetBluetoothHandsfreeAbstractDevice(
-        BluetoothHandsfreeAbstractDevice):
-    def __init__(self, ad_controller):
-        self.ad_controller = ad_controller
-
-    @property
-    def mac_address(self):
-        """Getting device mac with more stability ensurance.
-
-        Sometime, getting mac address is flaky that it returns None. Adding a
-        loop to add more ensurance of getting correct mac address.
-        """
-        device_mac = None
-        start_time = time.time()
-        end_time = start_time + bt_default_timeout
-        while not device_mac and time.time() < end_time:
-            device_mac = self.ad_controller.droid.bluetoothGetLocalAddress()
-        asserts.assert_true(device_mac, 'Can not get the MAC address')
-        return device_mac
-
-    def accept_call(self):
-        return self.ad_controller.droid.telecomAcceptRingingCall(None)
-
-    def end_call(self):
-        return self.ad_controller.droid.telecomEndCall()
-
-    def enter_pairing_mode(self):
-        self.ad_controller.droid.bluetoothStartPairingHelper(True)
-        return self.ad_controller.droid.bluetoothMakeDiscoverable()
-
-    def next_track(self):
-        return (self.ad_controller.droid.bluetoothMediaPassthrough("skipNext"))
-
-    def pause(self):
-        return self.ad_controller.droid.bluetoothMediaPassthrough("pause")
-
-    def play(self):
-        return self.ad_controller.droid.bluetoothMediaPassthrough("play")
-
-    def power_off(self):
-        return self.ad_controller.droid.bluetoothToggleState(False)
-
-    def power_on(self):
-        return self.ad_controller.droid.bluetoothToggleState(True)
-
-    def previous_track(self):
-        return (self.ad_controller.droid.bluetoothMediaPassthrough("skipPrev"))
-
-    def reject_call(self):
-        return self.ad_controller.droid.telecomCallDisconnect(
-            self.ad_controller.droid.telecomCallGetCallIds()[0])
-
-    def reset(self):
-        return self.ad_controller.droid.bluetoothFactoryReset()
-
-    def volume_down(self):
-        target_step = self.ad_controller.droid.getMediaVolume() - 1
-        target_step = max(target_step, 0)
-        return self.ad_controller.droid.setMediaVolume(target_step)
-
-    def volume_up(self):
-        target_step = self.ad_controller.droid.getMediaVolume() + 1
-        max_step = self.ad_controller.droid.getMaxMediaVolume()
-        target_step = min(target_step, max_step)
-        return self.ad_controller.droid.setMediaVolume(target_step)
-
-
-class BluetoothHandsfreeAbstractDeviceFactory:
-    """Generates a BluetoothHandsfreeAbstractDevice for any device controller.
-    """
-
-    _controller_abstract_devices = {
-        'EarstudioReceiver': EarstudioReceiverBluetoothHandsfreeAbstractDevice,
-        'JaybirdX3Earbuds': JaybirdX3EarbudsBluetoothHandsfreeAbstractDevice,
-        'ParentDevice': PixelBudsBluetoothHandsfreeAbstractDevice,
-        'AndroidDevice': AndroidHeadsetBluetoothHandsfreeAbstractDevice
-    }
-
-    def generate(self, controller):
-        class_name = controller.__class__.__name__
-        if class_name in self._controller_abstract_devices:
-            return self._controller_abstract_devices[class_name](controller)
-        else:
-            validate_controller(controller, BluetoothHandsfreeAbstractDevice)
-            return controller
diff --git a/src/antlion/test_utils/abstract_devices/wlan_device.py b/src/antlion/test_utils/abstract_devices/wlan_device.py
index 2d11a44..5891012 100644
--- a/src/antlion/test_utils/abstract_devices/wlan_device.py
+++ b/src/antlion/test_utils/abstract_devices/wlan_device.py
@@ -16,15 +16,12 @@
 
 import inspect
 import logging
-import time
 
-import antlion.test_utils.wifi.wifi_test_utils as awutils
-from antlion.utils import adb_shell_ping
-
-from antlion import asserts
 from antlion.controllers import iperf_client
-from antlion.controllers.fuchsia_device import FuchsiaDevice
 from antlion.controllers.android_device import AndroidDevice
+from antlion.controllers.fuchsia_device import FuchsiaDevice
+from antlion.test_utils.wifi import wifi_test_utils as awutils
+from antlion.utils import adb_shell_ping
 
 FUCHSIA_VALID_SECURITY_TYPES = {"none", "wep", "wpa", "wpa2", "wpa3"}
 
@@ -41,8 +38,9 @@
     elif isinstance(hardware_device, AndroidDevice):
         return AndroidWlanDevice(hardware_device)
     else:
-        raise ValueError('Unable to create WlanDevice for type %s' %
-                         type(hardware_device))
+        raise ValueError(
+            "Unable to create WlanDevice for type %s" % type(hardware_device)
+        )
 
 
 class WlanDevice(object):
@@ -64,121 +62,146 @@
         """Base generic WLAN interface.  Only called if not overridden by
         another supported device.
         """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
+        raise NotImplementedError(
+            "{} must be defined.".format(inspect.currentframe().f_code.co_name)
+        )
 
     def reset_wifi(self):
         """Base generic WLAN interface.  Only called if not overridden by
         another supported device.
         """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
+        raise NotImplementedError(
+            "{} must be defined.".format(inspect.currentframe().f_code.co_name)
+        )
 
     def take_bug_report(self, test_name=None, begin_time=None):
         """Base generic WLAN interface.  Only called if not overridden by
         another supported device.
         """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
+        raise NotImplementedError(
+            "{} must be defined.".format(inspect.currentframe().f_code.co_name)
+        )
 
     def get_log(self, test_name, begin_time):
         """Base generic WLAN interface.  Only called if not overridden by
         another supported device.
         """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
+        raise NotImplementedError(
+            "{} must be defined.".format(inspect.currentframe().f_code.co_name)
+        )
 
     def turn_location_off_and_scan_toggle_off(self):
         """Base generic WLAN interface.  Only called if not overridden by
         another supported device.
         """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
+        raise NotImplementedError(
+            "{} must be defined.".format(inspect.currentframe().f_code.co_name)
+        )
 
-    def associate(self,
-                  target_ssid,
-                  target_pwd=None,
-                  check_connectivity=True,
-                  hidden=False,
-                  target_security=None):
+    def associate(
+        self,
+        target_ssid,
+        target_pwd=None,
+        check_connectivity=True,
+        hidden=False,
+        target_security=None,
+    ):
         """Base generic WLAN interface.  Only called if not overriden by
         another supported device.
         """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
+        raise NotImplementedError(
+            "{} must be defined.".format(inspect.currentframe().f_code.co_name)
+        )
 
     def disconnect(self):
         """Base generic WLAN interface.  Only called if not overridden by
         another supported device.
         """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
+        raise NotImplementedError(
+            "{} must be defined.".format(inspect.currentframe().f_code.co_name)
+        )
 
     def get_wlan_interface_id_list(self):
         """Base generic WLAN interface.  Only called if not overridden by
         another supported device.
         """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
+        raise NotImplementedError(
+            "{} must be defined.".format(inspect.currentframe().f_code.co_name)
+        )
 
     def get_default_wlan_test_interface(self):
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
+        raise NotImplementedError(
+            "{} must be defined.".format(inspect.currentframe().f_code.co_name)
+        )
 
     def destroy_wlan_interface(self, iface_id):
         """Base generic WLAN interface.  Only called if not overridden by
         another supported device.
         """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
+        raise NotImplementedError(
+            "{} must be defined.".format(inspect.currentframe().f_code.co_name)
+        )
 
     def send_command(self, command):
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
+        raise NotImplementedError(
+            "{} must be defined.".format(inspect.currentframe().f_code.co_name)
+        )
 
     def is_connected(self, ssid=None):
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
+        raise NotImplementedError(
+            "{} must be defined.".format(inspect.currentframe().f_code.co_name)
+        )
 
-    def can_ping(self,
-                 dest_ip,
-                 count=3,
-                 interval=1000,
-                 timeout=1000,
-                 size=25,
-                 additional_ping_params=None):
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
+    def can_ping(
+        self,
+        dest_ip,
+        count=3,
+        interval=1000,
+        timeout=1000,
+        size=25,
+        additional_ping_params=None,
+    ):
+        raise NotImplementedError(
+            "{} must be defined.".format(inspect.currentframe().f_code.co_name)
+        )
 
-    def ping(self,
-             dest_ip,
-             count=3,
-             interval=1000,
-             timeout=1000,
-             size=25,
-             additional_ping_params=None):
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
+    def ping(
+        self,
+        dest_ip,
+        count=3,
+        interval=1000,
+        timeout=1000,
+        size=25,
+        additional_ping_params=None,
+    ):
+        raise NotImplementedError(
+            "{} must be defined.".format(inspect.currentframe().f_code.co_name)
+        )
 
     def hard_power_cycle(self, pdus=None):
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
+        raise NotImplementedError(
+            "{} must be defined.".format(inspect.currentframe().f_code.co_name)
+        )
 
     def save_network(self, ssid):
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
+        raise NotImplementedError(
+            "{} must be defined.".format(inspect.currentframe().f_code.co_name)
+        )
 
     def clear_saved_networks(self):
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
+        raise NotImplementedError(
+            "{} must be defined.".format(inspect.currentframe().f_code.co_name)
+        )
 
     def create_iperf_client(self, test_interface=None):
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
+        raise NotImplementedError(
+            "{} must be defined.".format(inspect.currentframe().f_code.co_name)
+        )
 
     def feature_is_present(self, feature: str) -> bool:
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
+        raise NotImplementedError(
+            "{} must be defined.".format(inspect.currentframe().f_code.co_name)
+        )
 
 
 class AndroidWlanDevice(WlanDevice):
@@ -210,13 +233,15 @@
     def turn_location_off_and_scan_toggle_off(self):
         awutils.turn_location_off_and_scan_toggle_off(self.device)
 
-    def associate(self,
-                  target_ssid,
-                  target_pwd=None,
-                  key_mgmt=None,
-                  check_connectivity=True,
-                  hidden=False,
-                  target_security=None):
+    def associate(
+        self,
+        target_ssid,
+        target_pwd=None,
+        key_mgmt=None,
+        check_connectivity=True,
+        hidden=False,
+        target_security=None,
+    ):
         """Function to associate an Android WLAN device.
 
         Args:
@@ -229,20 +254,21 @@
         Returns:
             True if successfully connected to WLAN, False if not.
         """
-        network = {'SSID': target_ssid, 'hiddenSSID': hidden}
+        network = {"SSID": target_ssid, "hiddenSSID": hidden}
         if target_pwd:
-            network['password'] = target_pwd
+            network["password"] = target_pwd
         if key_mgmt:
-            network['security'] = key_mgmt
+            network["security"] = key_mgmt
         try:
             awutils.connect_to_wifi_network(
                 self.device,
                 network,
                 check_connectivity=check_connectivity,
-                hidden=hidden)
+                hidden=hidden,
+            )
             return True
         except Exception as e:
-            self.device.log.info('Failed to associated (%s)' % e)
+            self.device.log.info("Failed to associated (%s)" % e)
             return False
 
     def disconnect(self):
@@ -252,7 +278,7 @@
         pass
 
     def get_default_wlan_test_interface(self):
-        return 'wlan0'
+        return "wlan0"
 
     def destroy_wlan_interface(self, iface_id):
         pass
@@ -263,20 +289,21 @@
     def is_connected(self, ssid=None):
         wifi_info = self.device.droid.wifiGetConnectionInfo()
         if ssid:
-            return 'BSSID' in wifi_info and wifi_info['SSID'] == ssid
-        return 'BSSID' in wifi_info
+            return "BSSID" in wifi_info and wifi_info["SSID"] == ssid
+        return "BSSID" in wifi_info
 
-    def can_ping(self,
-                 dest_ip,
-                 count=3,
-                 interval=1000,
-                 timeout=1000,
-                 size=25,
-                 additional_ping_params=None):
-        return adb_shell_ping(self.device,
-                              dest_ip=dest_ip,
-                              count=count,
-                              timeout=timeout)
+    def can_ping(
+        self,
+        dest_ip,
+        count=3,
+        interval=1000,
+        timeout=1000,
+        size=25,
+        additional_ping_params=None,
+    ):
+        return adb_shell_ping(
+            self.device, dest_ip=dest_ip, count=count, timeout=timeout
+        )
 
     def ping(self, dest_ip, count=3, interval=1000, timeout=1000, size=25):
         pass
@@ -291,7 +318,7 @@
         pass
 
     def create_iperf_client(self, test_interface=None):
-        """ Returns an iperf client on the Android, without requiring a
+        """Returns an iperf client on the Android, without requiring a
         specific config.
 
         Args:
@@ -304,8 +331,8 @@
             test_interface = self.get_default_wlan_test_interface()
 
         return iperf_client.IPerfClientOverAdb(
-            android_device_or_serial=self.device,
-            test_interface=test_interface)
+            android_device_or_serial=self.device, test_interface=test_interface
+        )
 
     def feature_is_present(self, feature: str) -> bool:
         pass
@@ -321,6 +348,8 @@
         fuchsia_device: A Fuchsia WLAN device.
     """
 
+    device: FuchsiaDevice
+
     def __init__(self, fuchsia_device):
         super().__init__(fuchsia_device)
         self.identifier = fuchsia_device.ip
@@ -342,13 +371,15 @@
     def turn_location_off_and_scan_toggle_off(self):
         """Stub for Fuchsia implementation."""
 
-    def associate(self,
-                  target_ssid,
-                  target_pwd=None,
-                  key_mgmt=None,
-                  check_connectivity=True,
-                  hidden=False,
-                  target_security=None):
+    def associate(
+        self,
+        target_ssid,
+        target_pwd=None,
+        key_mgmt=None,
+        check_connectivity=True,
+        hidden=False,
+        target_security=None,
+    ):
         """Function to associate a Fuchsia WLAN device.
 
         Args:
@@ -362,70 +393,81 @@
         Returns:
             True if successfully connected to WLAN, False if not.
         """
-        if self.device.association_mechanism == 'drivers':
+        if self.device.association_mechanism == "drivers":
             bss_scan_response = self.device.sl4f.wlan_lib.wlanScanForBSSInfo()
-            if bss_scan_response.get('error'):
-                self.log.error('Scan for BSS info failed. Err: %s' %
-                               bss_scan_response['error'])
+            if bss_scan_response.get("error"):
+                self.log.error(
+                    "Scan for BSS info failed. Err: %s" % bss_scan_response["error"]
+                )
                 return False
 
-            bss_descs_for_ssid = bss_scan_response['result'].get(
-                target_ssid, None)
+            bss_descs_for_ssid = bss_scan_response["result"].get(target_ssid, None)
             if not bss_descs_for_ssid or len(bss_descs_for_ssid) < 1:
                 self.log.error(
-                    'Scan failed to find a BSS description for target_ssid %s'
-                    % target_ssid)
+                    "Scan failed to find a BSS description for target_ssid %s"
+                    % target_ssid
+                )
                 return False
 
             connection_response = self.device.sl4f.wlan_lib.wlanConnectToNetwork(
-                target_ssid, bss_descs_for_ssid[0], target_pwd=target_pwd)
+                target_ssid, bss_descs_for_ssid[0], target_pwd=target_pwd
+            )
             return self.device.check_connect_response(connection_response)
         else:
             return self.device.wlan_policy_controller.save_and_connect(
-                target_ssid, target_security, password=target_pwd)
+                target_ssid, target_security, password=target_pwd
+            )
 
     def disconnect(self):
         """Function to disconnect from a Fuchsia WLAN device.
-           Asserts if disconnect was not successful.
+        Asserts if disconnect was not successful.
         """
-        if self.device.association_mechanism == 'drivers':
+        if self.device.association_mechanism == "drivers":
             disconnect_response = self.device.sl4f.wlan_lib.wlanDisconnect()
             return self.device.check_disconnect_response(disconnect_response)
         else:
-            return self.device.wlan_policy_controller.remove_all_networks_and_wait_for_no_connections(
+            return (
+                self.device.wlan_policy_controller.remove_all_networks_and_wait_for_no_connections()
             )
 
     def status(self):
         return self.device.sl4f.wlan_lib.wlanStatus()
 
-    def can_ping(self,
-                 dest_ip,
-                 count=3,
-                 interval=1000,
-                 timeout=1000,
-                 size=25,
-                 additional_ping_params=None):
+    def can_ping(
+        self,
+        dest_ip,
+        count=3,
+        interval=1000,
+        timeout=1000,
+        size=25,
+        additional_ping_params=None,
+    ):
         return self.device.can_ping(
             dest_ip,
             count=count,
             interval=interval,
             timeout=timeout,
             size=size,
-            additional_ping_params=additional_ping_params)
+            additional_ping_params=additional_ping_params,
+        )
 
-    def ping(self,
-             dest_ip,
-             count=3,
-             interval=1000,
-             timeout=1000,
-             size=25,
-             additional_ping_params=None):
-        return self.device.ping(dest_ip,
-                                count=count,
-                                interval=interval,
-                                timeout=timeout,
-                                size=size,
-                                additional_ping_params=additional_ping_params)
+    def ping(
+        self,
+        dest_ip,
+        count=3,
+        interval=1000,
+        timeout=1000,
+        size=25,
+        additional_ping_params=None,
+    ):
+        return self.device.ping(
+            dest_ip,
+            count=count,
+            interval=interval,
+            timeout=timeout,
+            size=size,
+            additional_ping_params=additional_ping_params,
+        )
 
     def get_wlan_interface_id_list(self):
         """Function to list available WLAN interfaces.
@@ -433,7 +475,7 @@
         Returns:
             A list of wlan interface IDs.
         """
-        return self.device.sl4f.wlan_lib.wlanGetIfaceIdList().get('result')
+        return self.device.sl4f.wlan_lib.wlanGetIfaceIdList().get("result")
 
     def get_default_wlan_test_interface(self):
         """Returns name of the WLAN client interface"""
@@ -451,18 +493,19 @@
             True if successfully destroyed wlan interface, False if not.
         """
         result = self.device.sl4f.wlan_lib.wlanDestroyIface(iface_id)
-        if result.get('error') is None:
+        if result.get("error") is None:
             return True
         else:
-            self.log.error("Failed to destroy interface with: {}".format(
-                result.get('error')))
+            self.log.error(
+                "Failed to destroy interface with: {}".format(result.get("error"))
+            )
             return False
 
     def send_command(self, command):
         return self.device.ssh.run(str(command)).stdout
 
     def is_connected(self, ssid=None):
-        """ Determines if wlan_device is connected to wlan network.
+        """Determines if wlan_device is connected to wlan network.
 
         Args:
             ssid (optional): string, to check if device is connect to a specific
@@ -475,16 +518,15 @@
                 provided.
         """
         response = self.status()
-        if response.get('error'):
-            raise ConnectionError(
-                'Failed to get client network connection status')
-        result = response.get('result')
+        if response.get("error"):
+            raise ConnectionError("Failed to get client network connection status")
+        result = response.get("result")
         if isinstance(result, dict):
-            connected_to = result.get('Connected')
+            connected_to = result.get("Connected")
             # TODO(https://fxbug.dev/85938): Remove backwards compatibility once
             # ACTS is versioned with Fuchsia.
             if not connected_to:
-                connected_to = result.get('connected_to')
+                connected_to = result.get("connected_to")
             if not connected_to:
                 return False
 
@@ -492,36 +534,40 @@
                 # Replace encoding errors instead of raising an exception.
                 # Since `ssid` is a string, this will not affect the test
                 # for equality.
-                connected_ssid = bytearray(connected_to['ssid']).decode(
-                    encoding='utf-8', errors='replace')
+                connected_ssid = bytearray(connected_to["ssid"]).decode(
+                    encoding="utf-8", errors="replace"
+                )
                 return ssid == connected_ssid
             return True
         return False
 
     def hard_power_cycle(self, pdus):
-        self.device.reboot(reboot_type='hard', testbed_pdus=pdus)
+        self.device.reboot(reboot_type="hard", testbed_pdus=pdus)
 
     def save_network(self, target_ssid, security_type=None, target_pwd=None):
-        if self.device.association_mechanism == 'drivers':
+        if self.device.association_mechanism == "drivers":
             raise EnvironmentError(
-                'Cannot save network using the drivers. Saved networks are a '
-                'policy layer concept.')
+                "Cannot save network using the drivers. Saved networks are a "
+                "policy layer concept."
+            )
         if security_type and security_type not in FUCHSIA_VALID_SECURITY_TYPES:
-            raise TypeError('Invalid security type: %s' % security_type)
+            raise TypeError("Invalid security type: %s" % security_type)
         if not self.device.wlan_policy_controller.save_network(
-                target_ssid, security_type, password=target_pwd):
-            raise EnvironmentError('Failed to save network: %s' % target_ssid)
+            target_ssid, security_type, password=target_pwd
+        ):
+            raise EnvironmentError("Failed to save network: %s" % target_ssid)
 
     def clear_saved_networks(self):
-        if self.device.association_mechanism == 'drivers':
+        if self.device.association_mechanism == "drivers":
             raise EnvironmentError(
-                'Cannot clear saved network using the drivers. Saved networks '
-                'are a policy layer concept.')
+                "Cannot clear saved network using the drivers. Saved networks "
+                "are a policy layer concept."
+            )
         if not self.device.wlan_policy_controller.remove_all_networks():
-            raise EnvironmentError('Failed to clear saved networks')
+            raise EnvironmentError("Failed to clear saved networks")
 
     def create_iperf_client(self, test_interface=None):
-        """ Returns an iperf client on the FuchsiaDevice, without requiring a
+        """Returns an iperf client on the FuchsiaDevice, without requiring a
         specific config.
 
         Args:
@@ -540,12 +586,13 @@
 
         return iperf_client.IPerfClientOverSsh(
             {
-                'user': 'fuchsia',
-                'host': self.device.ip,
-                'ssh_config': self.device.ssh_config
+                "user": "fuchsia",
+                "host": self.device.ip,
+                "ssh_config": self.device.ssh_config,
             },
             ssh_provider=self.device.ssh,
-            test_interface=test_interface)
+            test_interface=test_interface,
+        )
 
     def feature_is_present(self, feature: str) -> bool:
         return feature in self.device.wlan_features
diff --git a/src/antlion/test_utils/abstract_devices/wmm_transceiver.py b/src/antlion/test_utils/abstract_devices/wmm_transceiver.py
index c8b61cc..e38d91a 100644
--- a/src/antlion/test_utils/abstract_devices/wmm_transceiver.py
+++ b/src/antlion/test_utils/abstract_devices/wmm_transceiver.py
@@ -27,22 +27,17 @@
 from antlion.controllers import iperf_client
 from antlion.controllers import iperf_server
 
-AC_VO = 'AC_VO'
-AC_VI = 'AC_VI'
-AC_BE = 'AC_BE'
-AC_BK = 'AC_BK'
+AC_VO = "AC_VO"
+AC_VI = "AC_VI"
+AC_BE = "AC_BE"
+AC_BK = "AC_BK"
 
 # TODO(fxb/61421): Add tests to check all DSCP classes are mapped to the correct
 # AC (there are many that aren't included here). Requires implementation of
 # sniffer.
-DEFAULT_AC_TO_TOS_TAG_MAP = {
-    AC_VO: '0xC0',
-    AC_VI: '0x80',
-    AC_BE: '0x0',
-    AC_BK: '0x20'
-}
-UDP = 'udp'
-TCP = 'tcp'
+DEFAULT_AC_TO_TOS_TAG_MAP = {AC_VO: "0xC0", AC_VI: "0x80", AC_BE: "0x0", AC_BK: "0x20"}
+UDP = "udp"
+TCP = "tcp"
 DEFAULT_IPERF_PORT = 5201
 DEFAULT_STREAM_TIME = 10
 DEFAULT_IP_ADDR_TIMEOUT = 15
@@ -78,13 +73,13 @@
         # If identifier is not provided as func arg, it must be provided via
         # config file.
         if not identifier:
-            identifier = config['identifier']
-        iperf_config = config['iperf_config']
+            identifier = config["identifier"]
+        iperf_config = config["iperf_config"]
 
     except KeyError as err:
         raise WmmTransceiverError(
-            'Parameter not provided as func arg, nor found in config: %s' %
-            err)
+            "Parameter not provided as func arg, nor found in config: %s" % err
+        )
 
     if wlan_devices is None:
         wlan_devices = []
@@ -92,20 +87,22 @@
     if access_points is None:
         access_points = []
 
-    port_range_start = config.get('port_range_start', DEFAULT_IPERF_PORT)
+    port_range_start = config.get("port_range_start", DEFAULT_IPERF_PORT)
 
     wd = None
     ap = None
-    if 'wlan_device' in config:
-        wd = _find_wlan_device(config['wlan_device'], wlan_devices)
-    elif 'access_point' in config:
-        ap = _find_access_point(config['access_point'], access_points)
+    if "wlan_device" in config:
+        wd = _find_wlan_device(config["wlan_device"], wlan_devices)
+    elif "access_point" in config:
+        ap = _find_access_point(config["access_point"], access_points)
 
-    return WmmTransceiver(iperf_config,
-                          identifier,
-                          wlan_device=wd,
-                          access_point=ap,
-                          port_range_start=port_range_start)
+    return WmmTransceiver(
+        iperf_config,
+        identifier,
+        wlan_device=wd,
+        access_point=ap,
+        port_range_start=port_range_start,
+    )
 
 
 def _find_wlan_device(wlan_device_identifier, wlan_devices):
@@ -124,8 +121,9 @@
     for wd in wlan_devices:
         if wlan_device_identifier == wd.identifier:
             return wd
-    raise WmmTransceiverError('No WlanDevice with identifier: %s' %
-                              wlan_device_identifier)
+    raise WmmTransceiverError(
+        "No WlanDevice with identifier: %s" % wlan_device_identifier
+    )
 
 
 def _find_access_point(access_point_ip, access_points):
@@ -143,22 +141,26 @@
     for ap in access_points:
         if ap.ssh_settings.hostname == access_point_ip:
             return ap
-    raise WmmTransceiverError('No AccessPoint with ip: %s' % access_point_ip)
+    raise WmmTransceiverError("No AccessPoint with ip: %s" % access_point_ip)
 
 
 class WmmTransceiver(object):
     """Object for handling WMM tagged streams between devices"""
-    def __init__(self,
-                 iperf_config,
-                 identifier,
-                 wlan_device=None,
-                 access_point=None,
-                 port_range_start=5201):
 
+    def __init__(
+        self,
+        iperf_config,
+        identifier,
+        wlan_device=None,
+        access_point=None,
+        port_range_start=5201,
+    ):
         self.identifier = identifier
         self.log = tracelogger.TraceLogger(
-            WmmTransceiverLoggerAdapter(logging.getLogger(),
-                                        {'identifier': self.identifier}))
+            WmmTransceiverLoggerAdapter(
+                logging.getLogger(), {"identifier": self.identifier}
+            )
+        )
         # WlanDevice or AccessPoint, that is used as the transceiver. Only one
         # will be set. This helps consolodate association, setup, teardown, etc.
         self.wlan_device = wlan_device
@@ -167,7 +169,7 @@
         # Parameters used to create IPerfClient and IPerfServer objects on
         # device
         self._iperf_config = iperf_config
-        self._test_interface = self._iperf_config.get('test_interface')
+        self._test_interface = self._iperf_config.get("test_interface")
         self._port_range_start = port_range_start
         self._next_server_port = port_range_start
 
@@ -224,26 +226,34 @@
         Returns:
             uuid: UUID object, identifier of the stream
         """
-        (receiver, access_category, bandwidth,
-         stream_time) = self._parse_stream_parameters(stream_parameters)
+        (
+            receiver,
+            access_category,
+            bandwidth,
+            stream_time,
+        ) = self._parse_stream_parameters(stream_parameters)
         uuid = uuid4()
 
-        (client, server_ip,
-         server_port) = self._get_stream_resources(uuid, receiver, subnet)
+        (client, server_ip, server_port) = self._get_stream_resources(
+            uuid, receiver, subnet
+        )
 
         self._validate_server_address(server_ip, uuid)
 
-        self.log.info('Running synchronous stream to %s WmmTransceiver' %
-                      receiver.identifier)
-        self._run_traffic(uuid,
-                          client,
-                          server_ip,
-                          server_port,
-                          self._active_streams,
-                          self._stream_results,
-                          access_category=access_category,
-                          bandwidth=bandwidth,
-                          stream_time=stream_time)
+        self.log.info(
+            "Running synchronous stream to %s WmmTransceiver" % receiver.identifier
+        )
+        self._run_traffic(
+            uuid,
+            client,
+            server_ip,
+            server_port,
+            self._active_streams,
+            self._stream_results,
+            access_category=access_category,
+            bandwidth=bandwidth,
+            stream_time=stream_time,
+        )
 
         self._return_stream_resources(uuid)
         return uuid
@@ -260,27 +270,28 @@
         Returns:
             uuid: UUID object, identifier of the stream
         """
-        (receiver, access_category, bandwidth,
-         time) = self._parse_stream_parameters(stream_parameters)
+        (receiver, access_category, bandwidth, time) = self._parse_stream_parameters(
+            stream_parameters
+        )
         uuid = uuid4()
 
-        (client, server_ip,
-         server_port) = self._get_stream_resources(uuid, receiver, subnet)
+        (client, server_ip, server_port) = self._get_stream_resources(
+            uuid, receiver, subnet
+        )
 
         self._validate_server_address(server_ip, uuid)
 
         pending_stream_config = {
-            'client': client,
-            'server_ip': server_ip,
-            'server_port': server_port,
-            'access_category': access_category,
-            'bandwidth': bandwidth,
-            'time': time
+            "client": client,
+            "server_ip": server_ip,
+            "server_port": server_port,
+            "access_category": access_category,
+            "bandwidth": bandwidth,
+            "time": time,
         }
 
         self._pending_async_streams[uuid] = pending_stream_config
-        self.log.info('Stream to %s WmmTransceiver prepared.' %
-                      receiver.identifier)
+        self.log.info("Stream to %s WmmTransceiver prepared." % receiver.identifier)
         return uuid
 
     def start_asynchronous_streams(self, start_time=None):
@@ -293,27 +304,30 @@
         """
         for uuid in self._pending_async_streams:
             pending_stream_config = self._pending_async_streams[uuid]
-            client = pending_stream_config['client']
-            server_ip = pending_stream_config['server_ip']
-            server_port = pending_stream_config['server_port']
-            access_category = pending_stream_config['access_category']
-            bandwidth = pending_stream_config['bandwidth']
-            time = pending_stream_config['time']
+            client = pending_stream_config["client"]
+            server_ip = pending_stream_config["server_ip"]
+            server_port = pending_stream_config["server_port"]
+            access_category = pending_stream_config["access_category"]
+            bandwidth = pending_stream_config["bandwidth"]
+            time = pending_stream_config["time"]
 
-            process = multiprocessing.Process(target=self._run_traffic,
-                                              args=[
-                                                  uuid, client, server_ip,
-                                                  server_port,
-                                                  self._active_streams,
-                                                  self._stream_results
-                                              ],
-                                              kwargs={
-                                                  'access_category':
-                                                  access_category,
-                                                  'bandwidth': bandwidth,
-                                                  'stream_time': time,
-                                                  'start_time': start_time
-                                              })
+            process = multiprocessing.Process(
+                target=self._run_traffic,
+                args=[
+                    uuid,
+                    client,
+                    server_ip,
+                    server_port,
+                    self._active_streams,
+                    self._stream_results,
+                ],
+                kwargs={
+                    "access_category": access_category,
+                    "bandwidth": bandwidth,
+                    "stream_time": time,
+                    "start_time": start_time,
+                },
+            )
 
             # This needs to be set here to ensure its marked active before
             # it even starts.
@@ -334,12 +348,11 @@
             timeout: time, in seconds, to wait for each running process, if any,
                 to join
         """
-        self.log.info('Cleaning up any asynchronous streams.')
+        self.log.info("Cleaning up any asynchronous streams.")
 
         # Releases resources for any streams that were prepared, but no run
         for uuid in self._pending_async_streams:
-            self.log.error(
-                'Pending asynchronous stream %s never ran. Cleaning.' % uuid)
+            self.log.error("Pending asynchronous stream %s never ran. Cleaning." % uuid)
             self._return_stream_resources(uuid)
         self._pending_async_streams.clear()
 
@@ -350,8 +363,9 @@
             process.join(timeout)
             if process.is_alive():
                 self.log.error(
-                    'Stream process failed to join in %s seconds. Terminating.'
-                    % timeout)
+                    "Stream process failed to join in %s seconds. Terminating."
+                    % timeout
+                )
                 process.terminate()
                 process.join()
         self._active_streams.clear()
@@ -384,17 +398,19 @@
 
     # Helper Functions
 
-    def _run_traffic(self,
-                     uuid,
-                     client,
-                     server_ip,
-                     server_port,
-                     active_streams,
-                     stream_results,
-                     access_category=None,
-                     bandwidth=None,
-                     stream_time=DEFAULT_STREAM_TIME,
-                     start_time=None):
+    def _run_traffic(
+        self,
+        uuid,
+        client,
+        server_ip,
+        server_port,
+        active_streams,
+        stream_results,
+        access_category=None,
+        bandwidth=None,
+        stream_time=DEFAULT_STREAM_TIME,
+        start_time=None,
+    ):
         """Runs an iperf3 stream.
 
         1. Adds stream UUID to active_streams
@@ -426,34 +442,46 @@
             with utils.SuppressLogOutput():
                 client.start_ssh()
 
-        ac_flag = ''
-        bandwidth_flag = ''
-        time_flag = '-t %s' % stream_time
+        ac_flag = ""
+        bandwidth_flag = ""
+        time_flag = "-t %s" % stream_time
 
         if access_category:
-            ac_flag = ' -S %s' % DEFAULT_AC_TO_TOS_TAG_MAP[access_category]
+            ac_flag = " -S %s" % DEFAULT_AC_TO_TOS_TAG_MAP[access_category]
 
         if bandwidth:
-            bandwidth_flag = ' -u -b %sM' % bandwidth
+            bandwidth_flag = " -u -b %sM" % bandwidth
 
-        iperf_flags = '-p %s -i 1 %s%s%s -J' % (server_port, time_flag,
-                                                ac_flag, bandwidth_flag)
+        iperf_flags = "-p %s -i 1 %s%s%s -J" % (
+            server_port,
+            time_flag,
+            ac_flag,
+            bandwidth_flag,
+        )
         if not start_time:
             start_time = time.time()
-        time_str = datetime.fromtimestamp(start_time).strftime('%H:%M:%S.%f')
+        time_str = datetime.fromtimestamp(start_time).strftime("%H:%M:%S.%f")
         self.log.info(
-            'At %s, starting %s second stream to %s:%s with (AC: %s, Bandwidth: %s)'
-            % (time_str, stream_time, server_ip, server_port, access_category,
-               bandwidth if bandwidth else 'Unlimited'))
+            "At %s, starting %s second stream to %s:%s with (AC: %s, Bandwidth: %s)"
+            % (
+                time_str,
+                stream_time,
+                server_ip,
+                server_port,
+                access_category,
+                bandwidth if bandwidth else "Unlimited",
+            )
+        )
 
         # If present, wait for stream start time
         if start_time:
             current_time = time.time()
             while current_time < start_time:
                 current_time = time.time()
-        path = client.start(server_ip, iperf_flags, '%s' % uuid)
+        path = client.start(server_ip, iperf_flags, "%s" % uuid)
         stream_results[uuid] = iperf_server.IPerfResult(
-            path, reporting_speed_units='mbps')
+            path, reporting_speed_units="mbps"
+        )
 
         if type(client) == iperf_client.IPerfClientOverSsh:
             client.close_ssh()
@@ -484,8 +512,7 @@
             uuid: UUID object, identifier of the stream
         """
         if uuid in self._active_streams:
-            raise EnvironmentError('Resource still being used by stream %s' %
-                                   uuid)
+            raise EnvironmentError("Resource still being used by stream %s" % uuid)
         (receiver, server_port) = self._reserved_servers.pop(uuid)
         receiver._release_server(server_port)
         client = self._reserved_clients.pop(uuid)
@@ -555,7 +582,7 @@
                 break
         else:
             iperf_server_config = self._iperf_config
-            iperf_server_config.update({'port': self._next_server_port})
+            iperf_server_config.update({"port": self._next_server_port})
             self._next_server_port += 1
             reserved_server = iperf_server.create([iperf_server_config])[0]
             self._iperf_server_ports[reserved_server.port] = reserved_server
@@ -566,15 +593,18 @@
         while time.time() < end_time:
             if self.wlan_device:
                 addresses = utils.get_interface_ip_addresses(
-                    self.wlan_device.device, self._test_interface)
+                    self.wlan_device.device, self._test_interface
+                )
             else:
                 addresses = reserved_server.get_interface_ip_addresses(
-                    self._test_interface)
-            for addr in addresses['ipv4_private']:
+                    self._test_interface
+                )
+            for addr in addresses["ipv4_private"]:
                 if utils.ip_in_subnet(addr, subnet):
                     return (addr, reserved_server.port)
         raise AttributeError(
-            'Reserved server has no ipv4 address in the %s subnet' % subnet)
+            "Reserved server has no ipv4 address in the %s subnet" % subnet
+        )
 
     def _release_server(self, server_port):
         """Releases reservation on IPerfServer, which was held for a stream
@@ -589,7 +619,7 @@
         self._iperf_servers[server] = AVAILABLE
 
     def _validate_server_address(self, server_ip, uuid, timeout=60):
-        """ Verifies server address can be pinged before attempting to run
+        """Verifies server address can be pinged before attempting to run
         traffic, since iperf is unforgiving when the server is unreachable.
 
         Args:
@@ -600,24 +630,25 @@
         Raises:
             WmmTransceiverError, if, after timeout, server ip is unreachable.
         """
-        self.log.info('Verifying server address (%s) is reachable.' %
-                      server_ip)
+        self.log.info("Verifying server address (%s) is reachable." % server_ip)
         end_time = time.time() + timeout
         while time.time() < end_time:
             if self.can_ping(server_ip):
                 break
             else:
                 self.log.debug(
-                    'Could not ping server address (%s). Retrying in 1 second.'
-                    % (server_ip))
+                    "Could not ping server address (%s). Retrying in 1 second."
+                    % (server_ip)
+                )
                 time.sleep(1)
         else:
             self._return_stream_resources(uuid)
-            raise WmmTransceiverError('IPerfServer address (%s) unreachable.' %
-                                      server_ip)
+            raise WmmTransceiverError(
+                "IPerfServer address (%s) unreachable." % server_ip
+            )
 
     def can_ping(self, dest_ip):
-        """ Utilizes can_ping function in wlan_device or access_point device to
+        """Utilizes can_ping function in wlan_device or access_point device to
         ping dest_ip
 
         Args:
@@ -648,18 +679,18 @@
             (receiver, access_category, bandwidth, time) as
             (WmmTransceiver, String, int, int)
         """
-        receiver = stream_parameters['receiver']
-        access_category = stream_parameters.get('access_category', None)
-        bandwidth = stream_parameters.get('bandwidth', None)
-        time = stream_parameters.get('time', DEFAULT_STREAM_TIME)
+        receiver = stream_parameters["receiver"]
+        access_category = stream_parameters.get("access_category", None)
+        bandwidth = stream_parameters.get("bandwidth", None)
+        time = stream_parameters.get("time", DEFAULT_STREAM_TIME)
         return (receiver, access_category, bandwidth, time)
 
 
 class WmmTransceiverLoggerAdapter(logging.LoggerAdapter):
     def process(self, msg, kwargs):
-        if self.extra['identifier']:
-            log_identifier = ' | %s' % self.extra['identifier']
+        if self.extra["identifier"]:
+            log_identifier = " | %s" % self.extra["identifier"]
         else:
-            log_identifier = ''
+            log_identifier = ""
         msg = "[WmmTransceiver%s] %s" % (log_identifier, msg)
         return (msg, kwargs)
diff --git a/src/antlion/test_utils/audio_analysis_lib/__init__.py b/src/antlion/test_utils/audio_analysis_lib/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/test_utils/audio_analysis_lib/__init__.py
+++ /dev/null
diff --git a/src/antlion/test_utils/audio_analysis_lib/audio_analysis.py b/src/antlion/test_utils/audio_analysis_lib/audio_analysis.py
deleted file mode 100644
index a4273c5..0000000
--- a/src/antlion/test_utils/audio_analysis_lib/audio_analysis.py
+++ /dev/null
@@ -1,669 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""This module provides utilities to do audio data analysis."""
-
-import logging
-import numpy
-import soundfile
-from scipy.signal import blackmanharris
-from scipy.signal import iirnotch
-from scipy.signal import lfilter
-
-# The default block size of pattern matching.
-ANOMALY_DETECTION_BLOCK_SIZE = 120
-
-# Only peaks with coefficient greater than 0.01 of the first peak should be
-# considered. Note that this correspond to -40dB in the spectrum.
-DEFAULT_MIN_PEAK_RATIO = 0.01
-
-# The minimum RMS value of meaningful audio data.
-MEANINGFUL_RMS_THRESHOLD = 0.001
-
-# The minimal signal norm value.
-_MINIMUM_SIGNAL_NORM = 0.001
-
-# The default pattern mathing threshold. By experiment, this threshold
-# can tolerate normal noise of 0.3 amplitude when sine wave signal
-# amplitude is 1.
-PATTERN_MATCHING_THRESHOLD = 0.85
-
-# The default number of samples within the analysis step size that the
-# difference between two anomaly time values can be to be grouped together.
-ANOMALY_GROUPING_TOLERANCE = 1.0
-
-# Window size for peak detection.
-PEAK_WINDOW_SIZE_HZ = 20
-
-
-class RMSTooSmallError(Exception):
-    """Error when signal RMS is too small."""
-
-
-class EmptyDataError(Exception):
-    """Error when signal is empty."""
-
-
-def normalize_signal(signal, saturate_value):
-    """Normalizes the signal with respect to the saturate value.
-
-    Args:
-        signal: A list for one-channel PCM data.
-        saturate_value: The maximum value that the PCM data might be.
-
-    Returns:
-        A numpy array containing normalized signal. The normalized signal has
-            value -1 and 1 when it saturates.
-
-    """
-    signal = numpy.array(signal)
-    return signal / float(saturate_value)
-
-
-def spectral_analysis(signal,
-                      rate,
-                      min_peak_ratio=DEFAULT_MIN_PEAK_RATIO,
-                      peak_window_size_hz=PEAK_WINDOW_SIZE_HZ):
-    """Gets the dominant frequencies by spectral analysis.
-
-    Args:
-        signal: A list of numbers for one-channel PCM data. This should be
-                   normalized to [-1, 1] so the function can check if signal RMS
-                   is too small to be meaningful.
-        rate: Sampling rate in samples per second. Example inputs: 44100,
-        48000
-        min_peak_ratio: The minimum peak_i/peak_0 ratio such that the
-                           peaks other than the greatest one should be
-                           considered.
-                           This is to ignore peaks that are too small compared
-                           to the first peak peak_0.
-        peak_window_size_hz: The window size in Hz to find the peaks.
-                                The minimum differences between found peaks will
-                                be half of this value.
-
-    Returns:
-        A list of tuples:
-              [(peak_frequency_0, peak_coefficient_0),
-               (peak_frequency_1, peak_coefficient_1),
-               (peak_frequency_2, peak_coefficient_2), ...]
-              where the tuples are sorted by coefficients. The last
-              peak_coefficient will be no less than peak_coefficient *
-              min_peak_ratio. If RMS is less than MEANINGFUL_RMS_THRESHOLD,
-              returns [(0, 0)].
-
-    """
-    # Checks the signal is meaningful.
-    if len(signal) == 0:
-        raise EmptyDataError('Signal data is empty')
-
-    signal_rms = numpy.linalg.norm(signal) / numpy.sqrt(len(signal))
-    logging.debug('signal RMS = %s', signal_rms)
-
-    # If RMS is too small, set dominant frequency and coefficient to 0.
-    if signal_rms < MEANINGFUL_RMS_THRESHOLD:
-        logging.warning(
-            'RMS %s is too small to be meaningful. Set frequency to 0.',
-            signal_rms)
-        return [(0, 0)]
-
-    logging.debug('Doing spectral analysis ...')
-
-    # First, pass signal through a window function to mitigate spectral leakage.
-    y_conv_w = signal * numpy.hanning(len(signal))
-
-    length = len(y_conv_w)
-
-    # x_f is the frequency in Hz, y_f is the transformed coefficient.
-    x_f = _rfft_freq(length, rate)
-    y_f = 2.0 / length * numpy.fft.rfft(y_conv_w)
-
-    # y_f is complex so consider its absolute value for magnitude.
-    abs_y_f = numpy.abs(y_f)
-    threshold = max(abs_y_f) * min_peak_ratio
-
-    # Suppresses all coefficients that are below threshold.
-    for i in range(len(abs_y_f)):
-        if abs_y_f[i] < threshold:
-            abs_y_f[i] = 0
-
-    # Gets the peak detection window size in indice.
-    # x_f[1] is the frequency difference per index.
-    peak_window_size = int(peak_window_size_hz / x_f[1])
-
-    # Detects peaks.
-    peaks = peak_detection(abs_y_f, peak_window_size)
-
-    # Transform back the peak location from index to frequency.
-    results = []
-    for index, value in peaks:
-        results.append((x_f[int(index)], value))
-    return results
-
-
-def _rfft_freq(length, rate):
-    """Gets the frequency at each index of real FFT.
-
-    Args:
-        length: The window length of FFT.
-        rate: Sampling rate in samples per second. Example inputs: 44100,
-        48000
-
-    Returns:
-        A numpy array containing frequency corresponding to numpy.fft.rfft
-            result at each index.
-
-    """
-    # The difference in Hz between each index.
-    val = rate / float(length)
-    # Only care half of frequencies for FFT on real signal.
-    result_length = length // 2 + 1
-    return numpy.linspace(0, (result_length - 1) * val, result_length)
-
-
-def peak_detection(array, window_size):
-    """Detects peaks in an array.
-
-    A point (i, array[i]) is a peak if array[i] is the maximum among
-    array[i - half_window_size] to array[i + half_window_size].
-    If array[i - half_window_size] to array[i + half_window_size] are all equal,
-    then there is no peak in this window.
-    Note that we only consider peak with value greater than 0.
-
-    Args:
-        array: The input array to detect peaks in. Array is a list of
-        absolute values of the magnitude of transformed coefficient.
-
-        window_size: The window to detect peaks.
-
-    Returns:
-        A list of tuples:
-              [(peak_index_1, peak_value_1), (peak_index_2, peak_value_2), ...]
-              where the tuples are sorted by peak values.
-
-    """
-    half_window_size = window_size / 2
-    length = len(array)
-
-    def mid_is_peak(array, mid, left, right):
-        """Checks if value at mid is the largest among left to right in array.
-
-        Args:
-            array: A list of numbers.
-            mid: The mid index.
-            left: The left index.
-            rigth: The right index.
-
-        Returns:
-            A tuple (is_peak, next_candidate)
-                  is_peak is True if array[index] is the maximum among numbers
-                  in array between index [left, right] inclusively.
-                  next_candidate is the index of next candidate for peak if
-                  is_peak is False. It is the index of maximum value in
-                  [mid + 1, right]. If is_peak is True, next_candidate is
-                  right + 1.
-
-        """
-        value_mid = array[int(mid)]
-        is_peak = True
-        next_peak_candidate_index = None
-
-        # Check the left half window.
-        for index in range(int(left), int(mid)):
-            if array[index] >= value_mid:
-                is_peak = False
-                break
-
-        # Mid is at the end of array.
-        if mid == right:
-            return is_peak, right + 1
-
-        # Check the right half window and also record next candidate.
-        # Favor the larger index for next_peak_candidate_index.
-        for index in range(int(right), int(mid), -1):
-            if (next_peak_candidate_index is None
-                    or array[index] > array[next_peak_candidate_index]):
-                next_peak_candidate_index = index
-
-        if array[next_peak_candidate_index] >= value_mid:
-            is_peak = False
-
-        if is_peak:
-            next_peak_candidate_index = right + 1
-
-        return is_peak, next_peak_candidate_index
-
-    results = []
-    mid = 0
-    next_candidate_idx = None
-    while mid < length:
-        left = max(0, mid - half_window_size)
-        right = min(length - 1, mid + half_window_size)
-
-        # Only consider value greater than 0.
-        if array[int(mid)] == 0:
-            mid = mid + 1
-            continue
-
-        is_peak, next_candidate_idx = mid_is_peak(array, mid, left, right)
-
-        if is_peak:
-            results.append((mid, array[int(mid)]))
-
-        # Use the next candidate found in [mid + 1, right], or right + 1.
-        mid = next_candidate_idx
-
-    # Sort the peaks by values.
-    return sorted(results, key=lambda x: x[1], reverse=True)
-
-
-def anomaly_detection(signal,
-                      rate,
-                      freq,
-                      block_size=ANOMALY_DETECTION_BLOCK_SIZE,
-                      threshold=PATTERN_MATCHING_THRESHOLD):
-    """Detects anomaly in a sine wave signal.
-
-    This method detects anomaly in a sine wave signal by matching
-    patterns of each block.
-    For each moving window of block in the test signal, checks if there
-    is any block in golden signal that is similar to this block of test signal.
-    If there is such a block in golden signal, then this block of test
-    signal is matched and there is no anomaly in this block of test signal.
-    If there is any block in test signal that is not matched, then this block
-    covers an anomaly.
-    The block of test signal starts from index 0, and proceeds in steps of
-    half block size. The overlapping of test signal blocks makes sure there must
-    be at least one block covering the transition from sine wave to anomaly.
-
-    Args:
-        signal: A 1-D array-like object for 1-channel PCM data.
-        rate: Sampling rate in samples per second. Example inputs: 44100,
-        48000
-        freq: The expected frequency of signal.
-        block_size: The block size in samples to detect anomaly.
-        threshold: The threshold of correlation index to be judge as matched.
-
-    Returns:
-        A list containing time markers in seconds that have an anomaly within
-            block_size samples.
-
-    """
-    if len(signal) == 0:
-        raise EmptyDataError('Signal data is empty')
-
-    golden_y = _generate_golden_pattern(rate, freq, block_size)
-
-    results = []
-
-    for start in range(0, len(signal), int(block_size / 2)):
-        end = start + block_size
-        test_signal = signal[start:end]
-        matched = _moving_pattern_matching(golden_y, test_signal, threshold)
-        if not matched:
-            results.append(start)
-
-    results = [float(x) / rate for x in results]
-
-    return results
-
-
-def get_anomaly_durations(signal,
-                          rate,
-                          freq,
-                          block_size=ANOMALY_DETECTION_BLOCK_SIZE,
-                          threshold=PATTERN_MATCHING_THRESHOLD,
-                          tolerance=ANOMALY_GROUPING_TOLERANCE):
-    """Detect anomalies in a sine wav and return their start and end times.
-
-    Run anomaly_detection function and parse resulting array of time values into
-    discrete anomalies defined by a start and end time tuple. Time values are
-    judged to be part of the same anomaly if they lie within a given tolerance
-    of half the block_size number of samples of each other.
-
-    Args:
-        signal: A 1-D array-like object for 1-channel PCM data.
-        rate (int): Sampling rate in samples per second.
-            Example inputs: 44100, 48000
-        freq (int): The expected frequency of signal.
-        block_size (int): The block size in samples to detect anomaly.
-        threshold (float): The threshold of correlation index to be judge as
-            matched.
-        tolerance (float): The number of samples greater than block_size / 2
-            that the sample distance between two anomaly time values can be and
-            still be grouped as the same anomaly.
-    Returns:
-        bounds (list): a list of (start, end) tuples where start and end are the
-            boundaries in seconds of the detected anomaly.
-    """
-    bounds = []
-    anoms = anomaly_detection(signal, rate, freq, block_size, threshold)
-    if len(anoms) == 0:
-        return bounds
-    end = anoms[0]
-    start = anoms[0]
-    for i in range(len(anoms) - 1):
-        end = anoms[i]
-        sample_diff = abs(anoms[i] - anoms[i + 1]) * rate
-        # We require a tolerance because sample_diff may be slightly off due to
-        # float rounding errors in Python.
-        if sample_diff > block_size / 2 + tolerance:
-            bounds.append((start, end))
-            start = anoms[i + 1]
-    bounds.append((start, end))
-    return bounds
-
-
-def _generate_golden_pattern(rate, freq, block_size):
-    """Generates a golden pattern of certain frequency.
-
-    The golden pattern must cover all the possibilities of waveforms in a
-    block. So, we need a golden pattern covering 1 period + 1 block size,
-    such that the test block can start anywhere in a period, and extends
-    a block size.
-
-    |period |1 bk|
-    |       |    |
-     . .     . .
-    .   .   .   .
-         . .     .
-
-    Args:
-        rate: Sampling rate in samples per second. Example inputs: 44100,
-        48000
-        freq: The frequency of golden pattern.
-        block_size: The block size in samples to detect anomaly.
-
-    Returns:
-        A 1-D array for golden pattern.
-
-    """
-    samples_in_a_period = int(rate / freq) + 1
-    samples_in_golden_pattern = samples_in_a_period + block_size
-    golden_x = numpy.linspace(0.0,
-                              (samples_in_golden_pattern - 1) * 1.0 / rate,
-                              samples_in_golden_pattern)
-    golden_y = numpy.sin(freq * 2.0 * numpy.pi * golden_x)
-    return golden_y
-
-
-def _moving_pattern_matching(golden_signal, test_signal, threshold):
-    """Checks if test_signal is similar to any block of golden_signal.
-
-    Compares test signal with each block of golden signal by correlation
-    index. If there is any block of golden signal that is similar to
-    test signal, then it is matched.
-
-    Args:
-        golden_signal: A 1-D array for golden signal.
-        test_signal: A 1-D array for test signal.
-        threshold: The threshold of correlation index to be judge as matched.
-
-    Returns:
-        True if there is a match. False otherwise.
-
-        ValueError: if test signal is longer than golden