diff --git a/.editorconfig b/.editorconfig
new file mode 100644
index 0000000..d7d9cae
--- /dev/null
+++ b/.editorconfig
@@ -0,0 +1,13 @@
+# EditorConfig is awesome: https://EditorConfig.org
+
+root = true
+
+[*]
+end_of_line = lf
+insert_final_newline = true
+trim_trailing_whitespace = true
+
+[*.py]
+charset = utf-8
+indent_style = space
+indent_size = 4
diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs
new file mode 100644
index 0000000..befa060
--- /dev/null
+++ b/.git-blame-ignore-revs
@@ -0,0 +1,5 @@
+# Run code through yapf
+19a821d5f1ff9079f9a40d27553182a433a27834
+
+# Run code through black
+0d9e3581d57f376865f49ae62fe9171789beca56
diff --git a/.gitignore b/.gitignore
index 029341d..767654b 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,90 +1,46 @@
+#
+# OS-specific
+#
+
 .DS_Store
-# Byte-compiled / optimized / DLL files
-__pycache__/
+
+#
+# Language specific
+#
+
+# Python
 *.py[cod]
-*$py.class
-
-# C extensions
-*.so
-
-# Distribution / packaging
-.Python
-env/
-build/
-develop-eggs/
-dist/
-downloads/
-eggs/
-.eggs/
-lib/
-lib64/
-parts/
-sdist/
-var/
 *.egg-info/
-.installed.cfg
-*.egg
-
-# PyInstaller
-#  Usually these files are written by a python script from a template
-#  before PyInstaller builds the exe, so as to inject date/other infos into it.
-*.manifest
-*.spec
-
-# Installer logs
-pip-log.txt
-pip-delete-this-directory.txt
-
-# Unit test / coverage reports
-htmlcov/
-.tox/
-.coverage
-.coverage.*
-.cache
-nosetests.xml
-coverage.xml
-*,cover
-.hypothesis/
-
-# Translations
-*.mo
-*.pot
-
-# Django stuff:
-*.log
-local_settings.py
-
-# Sphinx documentation
-docs/_build/
-
-# PyBuilder
-target/
-
-#Ipython Notebook
-.ipynb_checkpoints
-
-# pyenv
-.python-version
-
-# PyCharm
-.idea/
-
-# IntelliJ
-*.iml
-
-# VSCode
-/.vscode
-
-# Python virtual environment
+/build/
 /.venv
 
-# antlion configuration files
+#
+# Editors
+#
+
+/.idea/
+/.vscode/
+
+#
+# antlion
+#
+
+# Configuration
 /*.json
 /*.yaml
 /config/
 
-# antlion runtime files
+# Generated during run-time
 /logs
 
 # Local development scripts
 /*.sh
+
+#
+# third_party
+#
+
+/third_party/*
+!/third_party/github.com/
+!/third_party/github.com/jd/tenacity
+/third_party/github.com/jd/tenacity/src
diff --git a/BUILD.gn b/BUILD.gn
new file mode 100644
index 0000000..f2aab56
--- /dev/null
+++ b/BUILD.gn
@@ -0,0 +1,228 @@
+# Copyright 2023 The Fuchsia Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Declare Fuchsia build targets for using antlion from the Fuchsia tree.
+# Requires additional configuration of jiri fetch attributes from your Fuchsia
+# checkout:
+#   `jiri init -fetch-optional=antlion`
+
+import("//build/python/python_library.gni")
+
+# Tests for full build validation
+group("e2e_tests") {
+  testonly = true
+  public_deps = [ "src/antlion/tests:e2e_tests" ]
+}
+
+# Subset of tests to validate builds in under 15 minutes.
+group("e2e_tests_quick") {
+  testonly = true
+  public_deps = [ "src/antlion/tests:e2e_tests_quick" ]
+}
+
+# Tests for at-desk custom validation
+group("e2e_tests_manual") {
+  testonly = true
+  public_deps = [ "src/antlion/tests:e2e_tests_manual" ]
+}
+
+# deprecated: prefer e2e_tests_quick
+group("smoke_tests") {
+  testonly = true
+  public_deps = [ ":e2e_tests_quick" ]
+}
+
+# Unit tests only
+group("tests") {
+  testonly = true
+  public_deps = [ "runner:tests" ]
+}
+
+python_library("antlion") {
+  source_root = "//third_party/antlion/src/antlion"
+  sources = [
+    "__init__.py",
+    "base_test.py",
+    "bin/__init__.py",
+    "bin/act.py",
+    "capabilities/__init__.py",
+    "capabilities/ssh.py",
+    "config_parser.py",
+    "context.py",
+    "controllers/__init__.py",
+    "controllers/access_point.py",
+    "controllers/adb.py",
+    "controllers/adb_lib/__init__.py",
+    "controllers/adb_lib/error.py",
+    "controllers/android_device.py",
+    "controllers/android_lib/__init__.py",
+    "controllers/android_lib/errors.py",
+    "controllers/android_lib/events.py",
+    "controllers/android_lib/logcat.py",
+    "controllers/android_lib/services.py",
+    "controllers/android_lib/tel/__init__.py",
+    "controllers/android_lib/tel/tel_utils.py",
+    "controllers/ap_lib/__init__.py",
+    "controllers/ap_lib/ap_get_interface.py",
+    "controllers/ap_lib/ap_iwconfig.py",
+    "controllers/ap_lib/bridge_interface.py",
+    "controllers/ap_lib/dhcp_config.py",
+    "controllers/ap_lib/dhcp_server.py",
+    "controllers/ap_lib/extended_capabilities.py",
+    "controllers/ap_lib/hostapd.py",
+    "controllers/ap_lib/hostapd_ap_preset.py",
+    "controllers/ap_lib/hostapd_bss_settings.py",
+    "controllers/ap_lib/hostapd_config.py",
+    "controllers/ap_lib/hostapd_constants.py",
+    "controllers/ap_lib/hostapd_security.py",
+    "controllers/ap_lib/hostapd_utils.py",
+    "controllers/ap_lib/radio_measurement.py",
+    "controllers/ap_lib/radvd.py",
+    "controllers/ap_lib/radvd_config.py",
+    "controllers/ap_lib/radvd_constants.py",
+    "controllers/ap_lib/third_party_ap_profiles/__init__.py",
+    "controllers/ap_lib/third_party_ap_profiles/actiontec.py",
+    "controllers/ap_lib/third_party_ap_profiles/asus.py",
+    "controllers/ap_lib/third_party_ap_profiles/belkin.py",
+    "controllers/ap_lib/third_party_ap_profiles/linksys.py",
+    "controllers/ap_lib/third_party_ap_profiles/netgear.py",
+    "controllers/ap_lib/third_party_ap_profiles/securifi.py",
+    "controllers/ap_lib/third_party_ap_profiles/tplink.py",
+    "controllers/ap_lib/wireless_network_management.py",
+    "controllers/attenuator.py",
+    "controllers/attenuator_lib/__init__.py",
+    "controllers/attenuator_lib/_tnhelper.py",
+    "controllers/attenuator_lib/aeroflex/__init__.py",
+    "controllers/attenuator_lib/aeroflex/telnet.py",
+    "controllers/attenuator_lib/minicircuits/__init__.py",
+    "controllers/attenuator_lib/minicircuits/http.py",
+    "controllers/attenuator_lib/minicircuits/telnet.py",
+    "controllers/fastboot.py",
+    "controllers/fuchsia_device.py",
+    "controllers/fuchsia_lib/__init__.py",
+    "controllers/fuchsia_lib/base_lib.py",
+    "controllers/fuchsia_lib/device_lib.py",
+    "controllers/fuchsia_lib/ffx.py",
+    "controllers/fuchsia_lib/hardware_power_statecontrol_lib.py",
+    "controllers/fuchsia_lib/lib_controllers/__init__.py",
+    "controllers/fuchsia_lib/lib_controllers/netstack_controller.py",
+    "controllers/fuchsia_lib/lib_controllers/wlan_controller.py",
+    "controllers/fuchsia_lib/lib_controllers/wlan_policy_controller.py",
+    "controllers/fuchsia_lib/location/__init__.py",
+    "controllers/fuchsia_lib/location/regulatory_region_lib.py",
+    "controllers/fuchsia_lib/logging_lib.py",
+    "controllers/fuchsia_lib/netstack/__init__.py",
+    "controllers/fuchsia_lib/netstack/netstack_lib.py",
+    "controllers/fuchsia_lib/package_server.py",
+    "controllers/fuchsia_lib/sl4f.py",
+    "controllers/fuchsia_lib/ssh.py",
+    "controllers/fuchsia_lib/utils_lib.py",
+    "controllers/fuchsia_lib/wlan_ap_policy_lib.py",
+    "controllers/fuchsia_lib/wlan_deprecated_configuration_lib.py",
+    "controllers/fuchsia_lib/wlan_lib.py",
+    "controllers/fuchsia_lib/wlan_policy_lib.py",
+    "controllers/iperf_client.py",
+    "controllers/iperf_server.py",
+    "controllers/openwrt_ap.py",
+    "controllers/openwrt_lib/__init__.py",
+    "controllers/openwrt_lib/network_const.py",
+    "controllers/openwrt_lib/network_settings.py",
+    "controllers/openwrt_lib/openwrt_constants.py",
+    "controllers/openwrt_lib/wireless_config.py",
+    "controllers/openwrt_lib/wireless_settings_applier.py",
+    "controllers/pdu.py",
+    "controllers/pdu_lib/__init__.py",
+    "controllers/pdu_lib/digital_loggers/__init__.py",
+    "controllers/pdu_lib/digital_loggers/webpowerswitch.py",
+    "controllers/pdu_lib/synaccess/__init__.py",
+    "controllers/pdu_lib/synaccess/np02b.py",
+    "controllers/sl4a_lib/__init__.py",
+    "controllers/sl4a_lib/error_reporter.py",
+    "controllers/sl4a_lib/event_dispatcher.py",
+    "controllers/sl4a_lib/rpc_client.py",
+    "controllers/sl4a_lib/rpc_connection.py",
+    "controllers/sl4a_lib/sl4a_manager.py",
+    "controllers/sl4a_lib/sl4a_ports.py",
+    "controllers/sl4a_lib/sl4a_session.py",
+    "controllers/sl4a_lib/sl4a_types.py",
+    "controllers/sniffer.py",
+    "controllers/sniffer_lib/__init__.py",
+    "controllers/sniffer_lib/local/__init__.py",
+    "controllers/sniffer_lib/local/local_base.py",
+    "controllers/sniffer_lib/local/tcpdump.py",
+    "controllers/sniffer_lib/local/tshark.py",
+    "controllers/utils_lib/__init__.py",
+    "controllers/utils_lib/commands/__init__.py",
+    "controllers/utils_lib/commands/ip.py",
+    "controllers/utils_lib/commands/route.py",
+    "controllers/utils_lib/commands/shell.py",
+    "controllers/utils_lib/host_utils.py",
+    "controllers/utils_lib/ssh/__init__.py",
+    "controllers/utils_lib/ssh/connection.py",
+    "controllers/utils_lib/ssh/formatter.py",
+    "controllers/utils_lib/ssh/settings.py",
+    "dict_object.py",
+    "error.py",
+    "event/__init__.py",
+    "event/decorators.py",
+    "event/event.py",
+    "event/event_bus.py",
+    "event/event_subscription.py",
+    "event/subscription_bundle.py",
+    "event/subscription_handle.py",
+    "keys.py",
+    "libs/__init__.py",
+    "libs/logging/__init__.py",
+    "libs/logging/log_stream.py",
+    "libs/ota/__init__.py",
+    "libs/ota/ota_runners/__init__.py",
+    "libs/ota/ota_runners/ota_runner.py",
+    "libs/ota/ota_runners/ota_runner_factory.py",
+    "libs/ota/ota_tools/__init__.py",
+    "libs/ota/ota_tools/adb_sideload_ota_tool.py",
+    "libs/ota/ota_tools/ota_tool.py",
+    "libs/ota/ota_tools/ota_tool_factory.py",
+    "libs/ota/ota_tools/update_device_ota_tool.py",
+    "libs/ota/ota_updater.py",
+    "libs/proc/__init__.py",
+    "libs/proc/job.py",
+    "libs/proc/process.py",
+    "libs/yaml_writer.py",
+    "logger.py",
+    "net.py",
+    "records.py",
+    "signals.py",
+    "test_decorators.py",
+    "test_runner.py",
+    "test_utils/__init__.py",
+    "test_utils/abstract_devices/__init__.py",
+    "test_utils/abstract_devices/wlan_device.py",
+    "test_utils/abstract_devices/wmm_transceiver.py",
+    "test_utils/dhcp/__init__.py",
+    "test_utils/dhcp/base_test.py",
+    "test_utils/fuchsia/__init__.py",
+    "test_utils/fuchsia/utils.py",
+    "test_utils/fuchsia/wmm_test_cases.py",
+    "test_utils/net/__init__.py",
+    "test_utils/net/connectivity_const.py",
+    "test_utils/net/net_test_utils.py",
+    "test_utils/wifi/__init__.py",
+    "test_utils/wifi/base_test.py",
+    "test_utils/wifi/wifi_constants.py",
+    "test_utils/wifi/wifi_performance_test_utils/__init__.py",
+    "test_utils/wifi/wifi_performance_test_utils/bokeh_figure.py",
+    "test_utils/wifi/wifi_performance_test_utils/brcm_utils.py",
+    "test_utils/wifi/wifi_performance_test_utils/ping_utils.py",
+    "test_utils/wifi/wifi_performance_test_utils/qcom_utils.py",
+    "test_utils/wifi/wifi_power_test_utils.py",
+    "test_utils/wifi/wifi_test_utils.py",
+    "tracelogger.py",
+    "utils.py",
+  ]
+  library_deps = [
+    "//third_party/mobly",
+    "//third_party/pyyaml:yaml",
+    "third_party/github.com/jd/tenacity",
+  ]
+}
diff --git a/CHANGELOG.md b/CHANGELOG.md
index a9c7f67..248b51f 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -10,20 +10,65 @@
 
 ## [Unreleased]
 
+## 0.3.0 - 2023-05-17
+
+### Deprecated
+
+- **Support for ACTS JSON configs; instead, use Mobly YAML configs.** To
+ease this transition, upon running `act.py`, a compatible YAML config will be
+generated for you and placed next to your JSON config.
+- **The `act.py` binary; instead, invoke tests directly.** Upon running
+`act.py`, a deprecation warning will provide instructions for how to invoke
+antlion tests without act.py and with the newly generated YAML config.
+
 ### Added
 
+- Presubmit testing in [CV][CV] (aka CQ). All tests specified with the
+`qemu_env` environment will run before every antlion CL is submitted.
+- Postsubmit testing in [CI][CI]. See [Milo][builders] for an exhaustive list of
+builders.
+- [EditorConfig](https://editorconfig.org) file for consistent coding styles.
+Installing an EditorConfig plugin for your editor is highly recommended.
+
+[CV]: https://chromium.googlesource.com/infra/luci/luci-go/+/refs/heads/main/cv/README.md
+[CI]: https://chromium.googlesource.com/chromium/src/+/master/docs/tour_of_luci_ui.md
+[builders]: https://luci-milo.appspot.com/ui/search?q=antlion
+
 ### Changed
 
+- Default test execution from ACTS to Mobly. `antlion_host_test()` now invokes
+the test file directly using the Mobly test runner, rather than using `act.py`.
+  - All tests have been refactored to allow direct running with the Mobly test
+  runner.
+  - `act.py` now converts ACTS JSON config to compatible Mobly YAML config. The
+  resulting config is passed directly to Mobly's config parser. See notes for
+  this release's deprecations above.
+- `antlion-runner` now generates a YAML config instead of a JSON config.
+- `FuchsiaDevice.authorized_file_loc` config field is now optional. This field
+is only used during `FlashTest`; it is not used when the device is already
+provisioned (e.g. when tests are dispatched in Fuchsia infrastructure).
+
 ### Removed
 
+- Unused controllers and tests (full list)
+
 ### Fixed
-
 [unreleased]: https://fuchsia.googlesource.com/antlion/+/refs/tags/v0.2.0..refs/heads/main
+- Failure to stop session_manager using ffx in `WlanRebootTest` ([@patricklu],
+[bug](http://b/267330535))
+- Failure to parse 'test_name' in the DHCP configuration file in
+`Dhcpv4InteropTest` (invalid option), introduced by a previous refactor
+([@patricklu], [bug](http://b/232574848))
+- `Dhcpv4InteropTest` now logs to a temp file instead of /var/log/messages,
+fixing a test error caused by duplicate PID log messages ([@patricklu],
+[bug](http://b/232574848))
 
-## [0.2.0] - 2022-01-03
+## [0.2.0] - 2023-01-03
 
 ### Added
 
+- Snapshots taken before reboot and during test teardown in `WlanRebootTest`
+([@patricklu], [bug](http://b/273923552))
 - Download radvd logs from AP for debugging IPv6 address allocation
 - Optional `wlan_features` config field to `FuchsiaDevice` for declaring which
 WLAN features the device supports, such as BSS Transition Management
diff --git a/README.md b/README.md
index be529cf..7d5950b 100644
--- a/README.md
+++ b/README.md
@@ -11,7 +11,85 @@
 [Report Bug]: http://go/conn-test-bug
 [Request Feature]: http://b/issues/new?component=1182297&template=1680893
 
-## Getting Started
+## Getting started with QEMU
+
+The quickest way to run antlion is by using the Fuchsia QEMU emulator. This
+supports antlion tests that do not require hardware-specific capabilities like
+WLAN, and it is especially useful for verifying that antlion builds and runs
+without syntax errors. If you require WLAN capabilities, see
+[below](#running-with-a-local-physical-device).
+
+1. [Checkout Fuchsia](https://fuchsia.dev/fuchsia-src/get-started/get_fuchsia_source)
+
+2. Configure and build Fuchsia to run antlion tests virtually on QEMU
+
+   ```sh
+   fx set core.qemu-x64 \
+      --with //src/testing/sl4f \
+      --with //src/sys/bin/start_sl4f \
+      --args 'core_realm_shards += [ "//src/testing/sl4f:sl4f_core_shard" ]' \
+      --with-host //third_party/antlion:e2e_tests_quick
+   fx build
+   ```
+
+3. In a separate terminal, run the emulator with networking enabled
+
+   ```sh
+   ffx emu stop && ffx emu start -H --net tap && ffx log
+   ```
+
+4. In a separate terminal, run a package server
+
+   ```sh
+   fx serve
+   ```
+
+5. Run an antlion test
+
+   ```sh
+   fx test --e2e --output //third_party/antlion/src/antlion/tests/examples:sl4f_sanity_test
+   ```
+
+## Running with a local physical device
+
+A physical device is required for most antlion tests, which rely on physical I/O
+such as WLAN and Bluetooth. Antlion is designed to make testing physical devices
+as easy, reliable, and reproducible as possible. The device will be discovered
+using mDNS, so make sure your host machine has a network connection to the
+device.
+
+1. Configure and build Fuchsia for your target with the following extra
+   arguments:
+
+   ```sh
+   fx set core.my-super-cool-product \
+      --with //src/testing/sl4f \
+      --with //src/sys/bin/start_sl4f \
+      --args='core_realm_shards += [ "//src/testing/sl4f:sl4f_core_shard" ]' \
+      --with-host //third_party/antlion:e2e_tests
+   fx build
+   ```
+
+2. Flash your device with the new build
+
+3. In a separate terminal, run a package server
+
+   ```sh
+   fx serve
+   ```
+
+4. Run an antlion test
+
+   ```sh
+   fx test --e2e --output //third_party/antlion/src/antlion/tests/functional:ping_stress_test
+   ```
+
+> Local auxiliary devices are not yet supported by `antlion-runner`, which is
+> responsible for generating Mobly configs. In the meantime, manually craft a
+> Mobly config that declares the auxiliary devices; see the sketch below and
+> the section on running without a Fuchsia checkout.
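+
+A minimal sketch of such a config, assuming one Fuchsia device and one access
+point. The controller fields mirror those generated by `antlion-runner`;
+addresses, usernames, and paths are placeholders.
+
+```yaml
+TestBeds:
+- Name: manual-testbed
+  Controllers:
+    FuchsiaDevice:
+    - ip: fuchsia-00e0-4c01-04df
+    AccessPoint:
+    - wan_interface: eth0
+      ssh_config:
+        ssh_binary_path: /usr/bin/ssh
+        host: 192.168.1.2
+        user: root
+        identity_file: ~/.ssh/testbed_key
+MoblyParams:
+  LogPath: logs
+```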
+
+## Running without a Fuchsia checkout
 
 Requires Python 3.8+
 
@@ -25,52 +103,77 @@
 
    ```sh
    cd antlion
-   python3 -m venv .venv  # creates a "virtual environment" in the `.venv` directory
-   source .venv/bin/activate  # activates the virtual environment. Run `deactivate` to exit it later
-   pip install --editable ".[dev,test]"
+   python3 -m venv .venv      # Create a virtual environment in the `.venv` directory
+   source .venv/bin/activate  # Activate the virtual environment
+   pip install --editable ".[mdns]"
+   # Run `deactivate` later to exit the virtual environment
    ```
 
 3. Write the sample config and update the Fuchsia controller to match your
    development environment
 
    ```sh
-   mkdir -p config
-   cat <<EOF > config/simple.json
-   {
-      "testbed": [{
-         "name": "simple_testbed",
-         "FuchsiaDevice": [{
-            "ip": "fuchsia-00e0-4c01-04df"
-         }]
-      }],
-      "logpath": "logs"
-   }
+   cat <<EOF > simple-config.yaml
+   TestBeds:
+   - Name: antlion-runner
+     Controllers:
+       FuchsiaDevice:
+       - ip: fuchsia-00e0-4c01-04df
+   MoblyParams:
+     LogPath: logs
    EOF
    ```
 
+   Replace `fuchsia-00e0-4c01-04df` with your device's nodename, or
+   `fuchsia-emulator` if using an emulator. The nodename can be found by looking
+   for a log similar to the one below.
+
+   ```text
+   [0.524][klog][klog][I] netsvc: nodename='fuchsia-emulator'
+   ```
+
 4. Run the sanity test
 
    ```sh
-   antlion -c config/simple.json -tc Sl4fSanityTest
+   python src/antlion/tests/examples/Sl4fSanityTest.py -c simple-config.yaml
    ```
 
-See `antlion -h` for more full usage.
-
 ## Contributing
 
-Contributions are what make open source a great place to learn, inspire, and
-create. Any contributions you make are **greatly appreciated**.
+Contributions are what make open source projects a great place to learn,
+inspire, and create. Any contributions you make are **greatly appreciated**.
+If you have a suggestion that would make this better, please create a CL.
 
-If you have a suggestion that would make this better, please create a pull
-request.
+Before contributing, additional setup is necessary:
 
-1. Create a feature branch (`git checkout -b feature/amazing-feature`)
-2. Document your change in `CHANGELOG.md`
-3. Commit changes (`git commit -m 'Add some amazing feature'`)
-4. Upload CL (`git push origin HEAD:refs/for/main`)
+- Install developer Python packages for formatting and linting
+
+  ```sh
+  pip install --editable ".[dev]"
+  ```
+
+- Install an [EditorConfig](https://editorconfig.org/) plugin for consistent
+  whitespace
+
+- Install [Black](https://pypi.org/project/black/), our preferred code
+  formatter. Optionally, add the corresponding extension to your editor.
+
+- Complete the steps in '[Contribute source changes]' to gain authorization to
+  upload CLs to Fuchsia's Gerrit.
+
+To create a CL:
+
+1. Create a branch (`git checkout -b feature/amazing-feature`)
+2. Make changes
+3. Document the changes in `CHANGELOG.md`
+4. Run your change through the `black` formatter
+5. Commit changes (`git add . && git commit -m 'Add some amazing feature'`)
+6. Upload CL (`git push origin HEAD:refs/for/main`)
 
 > A public bug tracker is not (yet) available.
 
+[Contribute source changes]: https://fuchsia.dev/fuchsia-src/development/source_code/contribute_changes#prerequisites
+
 ### Recommended git aliases
 
 There are a handful of git commands that will be commonly used throughout the
@@ -87,6 +190,13 @@
   uc = push origin HEAD:refs/for/main%l=Commit-Queue+1,l=Fuchsia-Auto-Submit+1,publish-comments,r=sbalana
 ```
 
+You may also want to add a git config section so that `git blame` ignores the
+project's large formatting changes:
+
+```gitconfig
+[blame]
+  ignoreRevsFile = .git-blame-ignore-revs
+```
+
 ## License
 
 Distributed under the Apache 2.0 License. See `LICENSE` for more information.
diff --git a/antlion_host_test.gni b/antlion_host_test.gni
new file mode 100644
index 0000000..96f7654
--- /dev/null
+++ b/antlion_host_test.gni
@@ -0,0 +1,159 @@
+import("//build/python/python_binary.gni")
+import("//build/rust/rustc_binary.gni")
+import("//build/testing/host_test.gni")
+import("//build/testing/host_test_data.gni")
+
+# Declares a host-side antlion test.
+#
+# Example
+#
+# ```
+# antlion_host_test("Sl4fSanityTest") {
+#   main_source = "Sl4fSanityTest.py"
+# }
+# ```
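+#
+# A sketch showing the optional parameters; the target name, file names, and
+# environments below are illustrative only (using `display_envs` assumes
+# //third_party/antlion/environments.gni is imported):
+#
+# ```
+# import("//third_party/antlion/environments.gni")
+#
+# antlion_host_test("WlanExampleTest") {
+#   main_source = "WlanExampleTest.py"
+#   sources = [ "helpers.py" ]
+#   test_params = "wlan_example_test_params.yaml"
+#   environments = display_envs
+# }
+# ```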
+#
+# Parameters
+#
+#  main_source
+#    The .py file defining the antlion test.
+#    Type: path
+#
+#  sources (optional)
+#    Other files that are used in the test.
+#    Type: list(path)
+#    Default: empty list
+#
+#  test_params (optional)
+#    Path to a YAML file with additional test parameters. This will be provided
+#    to the test in the antlion config under the "test_params" key.
+#    Type: string
+#
+#  extra_args (optional)
+#    Additional arguments to pass to the test.
+#    Type: list(string)
+#
+#   deps
+#   environments
+#   visibility
+template("antlion_host_test") {
+  assert(defined(invoker.main_source), "main_source is required")
+
+  #
+  # Define antlion test python_binary().
+  #
+  _python_binary_name = "${target_name}.pyz"
+  _python_binary_target = "${target_name}_python_binary"
+  python_binary(_python_binary_target) {
+    forward_variables_from(invoker,
+                           [
+                             "main_source",
+                             "sources",
+                           ])
+    output_name = _python_binary_name
+    main_callable = "test_runner.main" # Mobly-specific entry point.
+    deps = [ "//third_party/antlion" ]
+    testonly = true
+    visibility = [ ":*" ]
+  }
+
+  _test_dir = "${root_out_dir}/test_data/" + get_label_info(target_name, "dir")
+
+  #
+  # Define antlion test host_test_data().
+  #
+  _host_test_data_target = "${target_name}_test_data"
+  host_test_data(_host_test_data_target) {
+    testonly = true
+    visibility = [ ":*" ]
+    sources = [ get_label_info(":${_python_binary_target}", "target_out_dir") +
+                "/${_python_binary_name}" ]
+    outputs = [ "${_test_dir}/${_python_binary_name}" ]
+    deps = [ ":${_python_binary_target}" ]
+    if (defined(invoker.deps)) {
+      deps += invoker.deps
+    }
+  }
+
+  #
+  # Define SSH binary host_test_data().
+  #
+  _host_test_data_ssh = "${target_name}_test_data_ssh"
+  host_test_data(_host_test_data_ssh) {
+    testonly = true
+    visibility = [ ":*" ]
+    sources = [ "//prebuilt/third_party/openssh-portable/${host_os}-${host_cpu}/bin/ssh" ]
+    outputs = [ "${_test_dir}/ssh" ]
+  }
+
+  #
+  # Define Mobly test params YAML host_test_data().
+  #
+  if (defined(invoker.test_params)) {
+    _host_test_data_test_params = "${target_name}_test_data_test_params"
+    host_test_data(_host_test_data_test_params) {
+      testonly = true
+      visibility = [ ":*" ]
+      sources = [ invoker.test_params ]
+      outputs = [ "${_test_dir}/${invoker.test_params}" ]
+    }
+  }
+
+  #
+  # Define FFX binary host_test_data().
+  #
+  _host_test_data_ffx = "${target_name}_test_data_ffx"
+  host_test_data(_host_test_data_ffx) {
+    testonly = true
+    visibility = [ ":*" ]
+    sources = [ get_label_info("//src/developer/ffx", "root_out_dir") + "/ffx" ]
+    outputs = [ "${_test_dir}/ffx" ]
+    deps = [ "//src/developer/ffx:ffx_bin($host_toolchain)" ]
+  }
+
+  #
+  # Define the antlion host_test() using antlion-runner.
+  #
+  host_test(target_name) {
+    forward_variables_from(invoker,
+                           [
+                             "environments",
+                             "visibility",
+                           ])
+
+    binary_path = "${root_out_dir}/antlion-runner"
+
+    args = [
+      "--python-bin",
+      rebase_path(python_exe_src, root_build_dir),
+      "--antlion-pyz",
+      rebase_path("${_test_dir}/${_python_binary_name}", root_build_dir),
+      "--out-dir",
+      rebase_path("${_test_dir}", root_build_dir),
+      "--ffx-binary",
+      rebase_path("${_test_dir}/ffx", root_build_dir),
+      "--ssh-binary",
+      rebase_path("${_test_dir}/ssh", root_build_dir),
+    ]
+
+    deps = [
+      ":${_host_test_data_ffx}",
+      ":${_host_test_data_ssh}",
+      ":${_host_test_data_target}",
+      "//build/python:interpreter",
+      "//third_party/antlion/runner",
+    ]
+
+    if (defined(invoker.test_params)) {
+      args += [
+        "--test-params",
+        rebase_path("${_test_dir}/${invoker.test_params}", root_build_dir),
+      ]
+      deps += [ ":${_host_test_data_test_params}" ]
+    }
+
+    if (defined(invoker.extra_args)) {
+      args += invoker.extra_args
+    }
+  }
+}
diff --git a/environments.gni b/environments.gni
new file mode 100644
index 0000000..2bdfb53
--- /dev/null
+++ b/environments.gni
@@ -0,0 +1,141 @@
+# Copyright 2023 The Fuchsia Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+antlion_astro_env = {
+  dimensions = {
+    device_type = "Astro"
+    pool = "fuchsia.tests.connectivity"
+  }
+  tags = [ "antlion" ]
+}
+
+antlion_sherlock_env = {
+  dimensions = {
+    device_type = "Sherlock"
+    pool = "fuchsia.tests.connectivity"
+  }
+  tags = [ "antlion" ]
+}
+
+antlion_nelson_env = {
+  dimensions = {
+    device_type = "Nelson"
+    pool = "fuchsia.tests.connectivity"
+  }
+  tags = [ "antlion" ]
+}
+
+astro_ap_env = {
+  dimensions = {
+    access_points = "1"
+    device_type = "Astro"
+    pool = "fuchsia.tests.connectivity"
+  }
+  tags = [ "antlion" ]
+}
+
+astro_ap_iperf_env = {
+  dimensions = {
+    access_points = "1"
+    device_type = "Astro"
+    iperf_servers = "1"
+    pool = "fuchsia.tests.connectivity"
+  }
+  tags = [ "antlion" ]
+}
+
+astro_ap_iperf_attenuator_env = {
+  dimensions = {
+    access_points = "1"
+    attenuators = "1"
+    device_type = "Astro"
+    iperf_servers = "1"
+    pool = "fuchsia.tests.connectivity"
+  }
+  tags = [ "antlion" ]
+}
+
+sherlock_ap_env = {
+  dimensions = {
+    access_points = "1"
+    device_type = "Sherlock"
+    pool = "fuchsia.tests.connectivity"
+  }
+  tags = [ "antlion" ]
+}
+
+sherlock_ap_iperf_env = {
+  dimensions = {
+    access_points = "1"
+    device_type = "Sherlock"
+    iperf_servers = "1"
+    pool = "fuchsia.tests.connectivity"
+  }
+  tags = [ "antlion" ]
+}
+
+sherlock_ap_iperf_attenuator_env = {
+  dimensions = {
+    access_points = "1"
+    attenuators = "1"
+    device_type = "Sherlock"
+    iperf_servers = "1"
+    pool = "fuchsia.tests.connectivity"
+  }
+  tags = [ "antlion" ]
+}
+
+nelson_ap_env = {
+  dimensions = {
+    access_points = "1"
+    device_type = "Nelson"
+    pool = "fuchsia.tests.connectivity"
+  }
+  tags = [ "antlion" ]
+}
+
+nelson_ap_iperf_env = {
+  dimensions = {
+    access_points = "1"
+    device_type = "Nelson"
+    iperf_servers = "1"
+    pool = "fuchsia.tests.connectivity"
+  }
+  tags = [ "antlion" ]
+}
+
+nelson_ap_iperf_attenuator_env = {
+  dimensions = {
+    access_points = "1"
+    attenuators = "1"
+    device_type = "Nelson"
+    iperf_servers = "1"
+    pool = "fuchsia.tests.connectivity"
+  }
+  tags = [ "antlion" ]
+}
+
+display_envs = [
+  antlion_astro_env,
+  antlion_sherlock_env,
+  antlion_nelson_env,
+]
+
+display_ap_envs = [
+  astro_ap_env,
+  sherlock_ap_env,
+  nelson_ap_env,
+]
+
+display_ap_iperf_envs = [
+  astro_ap_iperf_env,
+  sherlock_ap_iperf_env,
+  nelson_ap_iperf_env,
+]
+
+display_ap_iperf_attenuator_envs = [
+  astro_ap_iperf_attenuator_env,
+  sherlock_ap_iperf_attenuator_env,
+  nelson_ap_iperf_attenuator_env,
+]
diff --git a/pyproject.toml b/pyproject.toml
index c0fa915..b385122 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -20,25 +20,19 @@
 [project.optional-dependencies]
 # Required to support development tools
 dev = [
+    "mock",    # required for unit tests
     "shiv",    # packaging python
     "toml",    # required for pyproject.toml
     "vulture", # finds unused code
-    "yapf",    # code formatting
+    "black",   # code formatting
 ]
 digital_loggers_pdu = ["dlipower"]
-bluetooth = ["soundfile"]
 html_graphing = ["bokeh"]
 flash = ["usbinfo"]
 mdns = ["psutil", "zeroconf"]
 android = [
-    "Monsoon",
     "numpy",
-    "paramiko[ed25519]",
-    "pylibftdi",
-    "pyserial",
-    "requests",
     "scapy",
-    "scipy",
 ]
 
 [project.scripts]
@@ -49,19 +43,13 @@
 
 [tool.autoflake]
 imports = [
-    "Monsoon",
     "antlion",
     "dataclasses",
     "dlipower",
     "mobly",
     "mock",
     "numpy",
-    "paramiko",
-    "protobuf",
-    "pylibftdi",
-    "requests",
     "scapy",
-    "scipy",
     "tenacity",
     "usbinfo",
     "zeroconf",
diff --git a/runner/BUILD.gn b/runner/BUILD.gn
new file mode 100644
index 0000000..d405592
--- /dev/null
+++ b/runner/BUILD.gn
@@ -0,0 +1,50 @@
+import("//build/rust/rustc_binary.gni")
+import("//build/rust/rustc_test.gni")
+
+rustc_binary("runner") {
+  output_name = "antlion-runner"
+  edition = "2021"
+  with_unit_tests = true
+
+  deps = [
+    "//src/developer/ffx/lib/netext:lib($host_toolchain)",
+    "//src/lib/mdns/rust:mdns",
+    "//src/lib/network/packet",
+    "//third_party/rust_crates:anyhow",
+    "//third_party/rust_crates:argh",
+    "//third_party/rust_crates:home",
+    "//third_party/rust_crates:itertools",
+    "//third_party/rust_crates:lazy_static",
+    "//third_party/rust_crates:nix",
+    "//third_party/rust_crates:serde",
+    "//third_party/rust_crates:serde_json",
+    "//third_party/rust_crates:serde_yaml",
+    "//third_party/rust_crates:socket2",
+    "//third_party/rust_crates:thiserror",
+  ]
+
+  test_deps = [
+    "//third_party/rust_crates:assert_matches",
+    "//third_party/rust_crates:indoc",
+    "//third_party/rust_crates:pretty_assertions",
+    "//third_party/rust_crates:tempfile",
+  ]
+
+  sources = [
+    "src/config.rs",
+    "src/driver/infra.rs",
+    "src/driver/local.rs",
+    "src/driver/mod.rs",
+    "src/env.rs",
+    "src/finder.rs",
+    "src/main.rs",
+    "src/net.rs",
+    "src/runner.rs",
+    "src/yaml.rs",
+  ]
+}
+
+group("tests") {
+  testonly = true
+  deps = [ ":runner_test($host_toolchain)" ]
+}
diff --git a/runner/README.md b/runner/README.md
new file mode 100644
index 0000000..45c926a
--- /dev/null
+++ b/runner/README.md
@@ -0,0 +1,42 @@
+# antlion-runner
+
+A program to run antlion locally and in infrastructure. Includes a config
+generator with mDNS discovery and sensible defaults.
+
+## Using locally with an emulator
+
+Running antlion locally with a Fuchsia emulator allows developers to perform a
+sanity check on their changes. Running this way is very quick (~5 seconds) and
+can spot simple mistakes before code review!
+
+1. Build Fuchsia with antlion support
+   ```sh
+   jiri update -gc # if you haven't updated in a while
+   fx set workstation_eng_paused.qemu-x64 \
+      --with-host //third_party/antlion:e2e_tests \
+      --with-host //third_party/antlion:tests \
+      --with //src/testing/sl4f
+   fx build # if you haven't built in a while
+   ```
+2. Start the package server. Keep this running in the background.
+   ```sh
+   fx serve
+   ```
+3. In a separate terminal, start the emulator with access to external networks.
+   ```sh
+   ffx emu stop && ffx emu start -H --net tap && ffx log
+   ```
+4. In a separate terminal, run a test
+   ```sh
+   fx test --e2e --output //third_party/antlion:sl4f_sanity_test
+   ```
+
+> Note: Local testing with auxiliary devices is not supported by
+> `antlion-runner`. Use antlion directly for these cases; see the antlion
+> [README](../README.md).
+
+## Testing
+
+```sh
+fx set core.qemu-x64 --with //vendor/google/build/python/antlion/runner:tests
+fx test --output //vendor/google/build/python/antlion/runner:runner_test
+```
diff --git a/runner/src/config.rs b/runner/src/config.rs
new file mode 100644
index 0000000..571a8ab
--- /dev/null
+++ b/runner/src/config.rs
@@ -0,0 +1,162 @@
+// Copyright 2022 The Fuchsia Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+use crate::net::IpAddr;
+use crate::yaml;
+
+use std::path::PathBuf;
+
+use serde::{Deserialize, Serialize};
+use serde_yaml::Value;
+
+#[derive(Clone, Debug, Serialize)]
+#[serde(rename_all = "PascalCase")]
+/// Config used by antlion for declaring testbeds and test parameters.
+pub(crate) struct Config {
+    #[serde(rename = "TestBeds")]
+    pub testbeds: Vec<Testbed>,
+    pub mobly_params: MoblyParams,
+}
+
+impl Config {
+    /// Merge the given test parameters into all testbeds.
+    pub fn merge_test_params(&mut self, test_params: Value) {
+        for testbed in self.testbeds.iter_mut() {
+            match testbed.test_params.as_mut() {
+                Some(existing) => yaml::merge(existing, test_params.clone()),
+                None => testbed.test_params = Some(test_params.clone()),
+            }
+        }
+    }
+}
+
+#[derive(Clone, Debug, Serialize)]
+#[serde(rename_all = "PascalCase")]
+/// Parameters consumed by Mobly.
+pub(crate) struct MoblyParams {
+    pub log_path: PathBuf,
+}
+
+#[derive(Clone, Debug, Serialize)]
+#[serde(rename_all = "PascalCase")]
+/// A group of interconnected devices to be used together during an antlion test.
+pub(crate) struct Testbed {
+    pub name: String,
+    pub controllers: Controllers,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub test_params: Option<Value>,
+}
+
+#[derive(Clone, Debug, Default, Serialize)]
+pub(crate) struct Controllers {
+    #[serde(rename = "FuchsiaDevice", skip_serializing_if = "Vec::is_empty")]
+    pub fuchsia_devices: Vec<Fuchsia>,
+    #[serde(rename = "AccessPoint", skip_serializing_if = "Vec::is_empty")]
+    pub access_points: Vec<AccessPoint>,
+    #[serde(rename = "Attenuator", skip_serializing_if = "Vec::is_empty")]
+    pub attenuators: Vec<Attenuator>,
+    #[serde(rename = "PduDevice", skip_serializing_if = "Vec::is_empty")]
+    pub pdus: Vec<Pdu>,
+    #[serde(rename = "IPerfServer", skip_serializing_if = "Vec::is_empty")]
+    pub iperf_servers: Vec<IPerfServer>,
+}
+
+#[derive(Clone, Debug, Serialize)]
+/// A Fuchsia device for use with antlion as defined by [fuchsia_device.py].
+///
+/// [fuchsia_device.py]: https://cs.opensource.google/fuchsia/fuchsia/+/main:third_party/antlion/src/antlion/controllers/fuchsia_device.py
+pub(crate) struct Fuchsia {
+    pub mdns_name: String,
+    pub ip: IpAddr,
+    pub take_bug_report_on_fail: bool,
+    pub ssh_binary_path: PathBuf,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub ssh_config: Option<PathBuf>,
+    pub ffx_binary_path: PathBuf,
+    pub ssh_priv_key: PathBuf,
+    #[serde(rename = "PduDevice", skip_serializing_if = "Option::is_none")]
+    pub pdu_device: Option<PduRef>,
+    pub hard_reboot_on_fail: bool,
+}
+
+#[derive(Clone, Debug, Serialize, Deserialize)]
+/// Reference to a PDU device. Used to specify which port the attached device
+/// maps to on the PDU.
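+///
+/// A sketch of the serialized YAML form consumed by antlion (values are
+/// placeholders; note that `ip` is written out as `host`):
+///
+/// ```yaml
+/// device: synaccess.np02b
+/// host: 192.168.42.14
+/// port: 2
+/// ```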
+pub(crate) struct PduRef {
+    #[serde(default = "default_pdu_device")]
+    pub device: String,
+    #[serde(rename(serialize = "host"))]
+    pub ip: IpAddr,
+    pub port: u8,
+}
+
+fn default_pdu_device() -> String {
+    "synaccess.np02b".to_string()
+}
+
+#[derive(Clone, Debug, Serialize)]
+/// Declares an access point for use with antlion as defined by [access_point.py].
+///
+/// [access_point.py]: https://cs.opensource.google/fuchsia/fuchsia/+/main:third_party/antlion/src/antlion/controllers/access_point.py
+pub(crate) struct AccessPoint {
+    pub wan_interface: String,
+    pub ssh_config: SshConfig,
+    #[serde(rename = "PduDevice", skip_serializing_if = "Option::is_none")]
+    pub pdu_device: Option<PduRef>,
+    #[serde(rename = "Attenuator", skip_serializing_if = "Option::is_none")]
+    pub attenuators: Option<Vec<AttenuatorRef>>,
+}
+
+#[derive(Clone, Debug, Serialize)]
+pub(crate) struct SshConfig {
+    pub ssh_binary_path: PathBuf,
+    pub host: IpAddr,
+    pub user: String,
+    pub identity_file: PathBuf,
+}
+
+#[derive(Clone, Debug, Serialize)]
+/// Reference to an attenuator device. Used to specify which ports the attached
+/// devices' channels map to on the attenuator.
+pub(crate) struct AttenuatorRef {
+    #[serde(rename = "Address")]
+    pub address: IpAddr,
+    #[serde(rename = "attenuator_ports_wifi_2g")]
+    pub ports_2g: Vec<u8>,
+    #[serde(rename = "attenuator_ports_wifi_5g")]
+    pub ports_5g: Vec<u8>,
+}
+
+#[derive(Clone, Debug, Eq, PartialEq, Serialize)]
+#[serde(rename_all = "PascalCase")]
+/// Declares an attenuator for use with antlion as defined by [attenuator.py].
+///
+/// [attenuator.py]: https://cs.opensource.google/fuchsia/fuchsia/+/main:third_party/antlion/src/antlion/controllers/attenuator.py
+pub(crate) struct Attenuator {
+    pub model: String,
+    pub instrument_count: u8,
+    pub address: IpAddr,
+    pub protocol: String,
+    pub port: u16,
+}
+
+#[derive(Clone, Debug, Eq, PartialEq, Serialize)]
+/// Declares a power distribution unit for use with antlion as defined by [pdu.py].
+///
+/// [pdu.py]: https://cs.opensource.google/fuchsia/fuchsia/+/main:third_party/antlion/src/antlion/controllers/pdu.py
+pub(crate) struct Pdu {
+    pub device: String,
+    pub host: IpAddr,
+}
+
+#[derive(Clone, Debug, Serialize)]
+/// Declares an iPerf3 server for use with antlion as defined by [iperf_server.py].
+///
+/// [iperf_server.py]: https://cs.opensource.google/fuchsia/fuchsia/+/main:third_party/antlion/src/antlion/controllers/iperf_server.py
+pub(crate) struct IPerfServer {
+    pub ssh_config: SshConfig,
+    pub port: u16,
+    pub test_interface: String,
+    pub use_killall: bool,
+}
diff --git a/runner/src/driver/infra.rs b/runner/src/driver/infra.rs
new file mode 100644
index 0000000..ceff26e
--- /dev/null
+++ b/runner/src/driver/infra.rs
@@ -0,0 +1,898 @@
+// Copyright 2023 The Fuchsia Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+use crate::config::PduRef;
+use crate::config::{self, Config};
+use crate::driver::Driver;
+use crate::env::Environment;
+use crate::net::IpAddr;
+use crate::yaml;
+
+use std::collections::{HashMap, HashSet};
+use std::fs;
+use std::path::{Path, PathBuf};
+
+use anyhow::{anyhow, Context, Result};
+use itertools::Itertools;
+use serde::Deserialize;
+use serde_yaml::Value;
+use thiserror::Error;
+
+const TESTBED_NAME: &'static str = "antlion-runner";
+const ENV_OUT_DIR: &'static str = "FUCHSIA_TEST_OUTDIR";
+const ENV_TESTBED_CONFIG: &'static str = "FUCHSIA_TESTBED_CONFIG";
+const TEST_SUMMARY_FILE: &'static str = "test_summary.yaml";
+
+#[derive(Debug)]
+/// Driver for running antlion on emulated and hardware testbeds hosted by
+/// Fuchsia infrastructure.
+pub(crate) struct InfraDriver {
+    output_dir: PathBuf,
+    config: Config,
+}
+
+#[derive(Error, Debug)]
+pub(crate) enum InfraDriverError {
+    #[error("infra environment not detected, \"{0}\" environment variable not present")]
+    NotDetected(String),
+    #[error(transparent)]
+    Config(#[from] ConfigError),
+    #[error(transparent)]
+    Other(#[from] anyhow::Error),
+}
+
+#[derive(Error, Debug)]
+pub(crate) enum ConfigError {
+    #[error("ip {ip} in use by several devices")]
+    DuplicateIp { ip: IpAddr },
+    #[error("ip {ip} port {port} in use by several devices")]
+    DuplicatePort { ip: IpAddr, port: u8 },
+}
+
+impl InfraDriver {
+    /// Detect an InfraDriver. Returns `InfraDriverError::NotDetected` if the
+    /// required environment variables are not present.
+    pub fn new<E: Environment>(
+        env: E,
+        ssh_binary: PathBuf,
+        ffx_binary: PathBuf,
+    ) -> Result<Self, InfraDriverError> {
+        let config_path = match env.var(ENV_TESTBED_CONFIG) {
+            Ok(p) => PathBuf::from(p),
+            Err(std::env::VarError::NotPresent) => {
+                return Err(InfraDriverError::NotDetected(ENV_TESTBED_CONFIG.to_string()))
+            }
+            Err(e) => {
+                return Err(InfraDriverError::Other(anyhow!(
+                    "Failed to read \"{ENV_TESTBED_CONFIG}\" {e}"
+                )))
+            }
+        };
+        let config = fs::read_to_string(&config_path)
+            .with_context(|| format!("Failed to read \"{}\"", config_path.display()))?;
+        let targets: Vec<InfraTarget> = serde_json::from_str(&config)
+            .with_context(|| format!("Failed to parse into InfraTarget: \"{config}\""))?;
+        if targets.is_empty() {
+            return Err(InfraDriverError::Other(anyhow!(
+                "Expected at least one target declared in \"{}\"",
+                config_path.display()
+            )));
+        }
+
+        let output_path = match env.var(ENV_OUT_DIR) {
+            Ok(p) => p,
+            Err(std::env::VarError::NotPresent) => {
+                return Err(InfraDriverError::NotDetected(ENV_OUT_DIR.to_string()))
+            }
+            Err(e) => {
+                return Err(InfraDriverError::Other(anyhow!(
+                    "Failed to read \"{ENV_OUT_DIR}\" {e}"
+                )))
+            }
+        };
+        let output_dir = PathBuf::from(output_path);
+        if !fs::metadata(&output_dir).context("Failed to stat the output directory")?.is_dir() {
+            return Err(InfraDriverError::Other(anyhow!(
+                "Expected a directory but found a file at \"{}\"",
+                output_dir.display()
+            )));
+        }
+
+        Ok(InfraDriver {
+            output_dir: output_dir.clone(),
+            config: InfraDriver::parse_targets(targets, ssh_binary, ffx_binary, output_dir)?,
+        })
+    }
+
+    fn parse_targets(
+        targets: Vec<InfraTarget>,
+        ssh_binary: PathBuf,
+        ffx_binary: PathBuf,
+        output_dir: PathBuf,
+    ) -> Result<Config, InfraDriverError> {
+        let mut fuchsia_devices: Vec<config::Fuchsia> = vec![];
+        let mut access_points: Vec<config::AccessPoint> = vec![];
+        let mut attenuators: HashMap<IpAddr, config::Attenuator> = HashMap::new();
+        let mut pdus: HashMap<IpAddr, config::Pdu> = HashMap::new();
+        let mut iperf_servers: Vec<config::IPerfServer> = vec![];
+        let mut test_params: Option<Value> = None;
+
+        let mut used_ips: HashSet<IpAddr> = HashSet::new();
+        let mut used_ports: HashMap<IpAddr, HashSet<u8>> = HashMap::new();
+
+        let mut register_ip = |ip: IpAddr| -> Result<(), InfraDriverError> {
+            if !used_ips.insert(ip.clone()) {
+                return Err(ConfigError::DuplicateIp { ip }.into());
+            }
+            Ok(())
+        };
+
+        let mut register_port = |ip: IpAddr, port: u8| -> Result<(), InfraDriverError> {
+            match used_ports.get_mut(&ip) {
+                Some(ports) => {
+                    if !ports.insert(port) {
+                        return Err(ConfigError::DuplicatePort { ip, port }.into());
+                    }
+                }
+                None => {
+                    if used_ports.insert(ip, HashSet::from([port])).is_some() {
+                        return Err(InfraDriverError::Other(anyhow!(
+                            "Used ports set was unexpectedly modified by concurrent use",
+                        )));
+                    }
+                }
+            };
+            Ok(())
+        };
+
+        let mut register_pdu = |p: Option<PduRef>| -> Result<(), InfraDriverError> {
+            if let Some(PduRef { device, ip, port }) = p {
+                register_port(ip.clone(), port)?;
+                let new = config::Pdu { device: device.clone(), host: ip.clone() };
+                if let Some(old) = pdus.insert(ip.clone(), new.clone()) {
+                    if old != new {
+                        return Err(ConfigError::DuplicateIp { ip }.into());
+                    }
+                }
+            }
+            Ok(())
+        };
+
+        let mut register_attenuator = |a: Option<AttenuatorRef>| -> Result<(), InfraDriverError> {
+            if let Some(a) = a {
+                let new = config::Attenuator {
+                    model: "minicircuits".to_string(),
+                    instrument_count: 4,
+                    address: a.ip.clone(),
+                    protocol: "http".to_string(),
+                    port: 80,
+                };
+                if let Some(old) = attenuators.insert(a.ip.clone(), new.clone()) {
+                    if old != new {
+                        return Err(ConfigError::DuplicateIp { ip: a.ip }.into());
+                    }
+                }
+            }
+            Ok(())
+        };
+
+        let mut merge_test_params = |p: Option<Value>| {
+            match (test_params.as_mut(), p) {
+                (None, Some(new)) => test_params = Some(new),
+                (Some(existing), Some(new)) => yaml::merge(existing, new),
+                (_, None) => {}
+            };
+        };
+
+        for target in targets {
+            match target {
+                InfraTarget::FuchsiaDevice { nodename, ipv4, ipv6, ssh_key, pdu, test_params } => {
+                    let ip: IpAddr = if !ipv4.is_empty() {
+                        ipv4.parse().context("Invalid IPv4 address")
+                    } else if !ipv6.is_empty() {
+                        ipv6.parse().context("Invalid IPv6 address")
+                    } else {
+                        Err(anyhow!("IP address not specified"))
+                    }?;
+
+                    fuchsia_devices.push(config::Fuchsia {
+                        mdns_name: nodename.clone(),
+                        ip: ip.clone(),
+                        take_bug_report_on_fail: true,
+                        ssh_binary_path: ssh_binary.clone(),
+                        // TODO(http://b/244747218): Remove when ssh_config is refactored away
+                        ssh_config: None,
+                        ffx_binary_path: ffx_binary.clone(),
+                        ssh_priv_key: ssh_key.clone(),
+                        pdu_device: pdu.clone(),
+                        hard_reboot_on_fail: true,
+                    });
+
+                    register_ip(ip)?;
+                    register_pdu(pdu)?;
+                    merge_test_params(test_params);
+                }
+                InfraTarget::AccessPoint { ip, attenuator, pdu, ssh_key } => {
+                    access_points.push(config::AccessPoint {
+                        wan_interface: "eth0".to_string(),
+                        ssh_config: config::SshConfig {
+                            ssh_binary_path: ssh_binary.clone(),
+                            host: ip.clone(),
+                            user: "root".to_string(),
+                            identity_file: ssh_key.clone(),
+                        },
+                        pdu_device: pdu.clone(),
+                        attenuators: attenuator.as_ref().map(|a| {
+                            vec![config::AttenuatorRef {
+                                address: a.ip.clone(),
+                                ports_2g: vec![1, 2, 3],
+                                ports_5g: vec![1, 2, 3],
+                            }]
+                        }),
+                    });
+
+                    register_ip(ip)?;
+                    register_pdu(pdu)?;
+                    register_attenuator(attenuator)?;
+                }
+                InfraTarget::IPerfServer { ip, user, test_interface, pdu, ssh_key } => {
+                    iperf_servers.push(config::IPerfServer {
+                        ssh_config: config::SshConfig {
+                            ssh_binary_path: ssh_binary.clone(),
+                            host: ip.clone(),
+                            user: user.to_string(),
+                            identity_file: ssh_key.clone(),
+                        },
+                        port: 5201,
+                        test_interface: test_interface.clone(),
+                        use_killall: true,
+                    });
+
+                    register_ip(ip.clone())?;
+                    register_pdu(pdu)?;
+                }
+            };
+        }
+
+        Ok(Config {
+            testbeds: vec![config::Testbed {
+                name: TESTBED_NAME.to_string(),
+                controllers: config::Controllers {
+                    fuchsia_devices: fuchsia_devices,
+                    access_points: access_points,
+                    attenuators: attenuators
+                        .into_values()
+                        .sorted_by_key(|a| a.address.clone())
+                        .collect(),
+                    pdus: pdus.into_values().sorted_by_key(|p| p.host.clone()).collect(),
+                    iperf_servers: iperf_servers,
+                },
+                test_params,
+            }],
+            mobly_params: config::MoblyParams { log_path: output_dir },
+        })
+    }
+}
+
+impl Driver for InfraDriver {
+    fn output_path(&self) -> &Path {
+        self.output_dir.as_path()
+    }
+    fn config(&self) -> Config {
+        self.config.clone()
+    }
+    fn teardown(&self) -> Result<()> {
+        let results_path =
+            self.output_dir.join(TESTBED_NAME).join("latest").join(TEST_SUMMARY_FILE);
+        match fs::File::open(&results_path) {
+            Ok(mut results) => {
+                println!("\nTest results from {}\n", results_path.display());
+                println!("[=====MOBLY RESULTS=====]");
+                std::io::copy(&mut results, &mut std::io::stdout())
+                    .context("Failed to copy results to stdout")?;
+            }
+            Err(e) => eprintln!("Failed to open \"{}\": {}", results_path.display(), e),
+        };
+
+        // Remove any symlinks from the output directory; symlinks cause
+        // errors while uploading to CAS.
+        //
+        // TODO: Remove when the fix is released and supported on Swarming bots
+        // https://github.com/bazelbuild/remote-apis-sdks/pull/229.
+        remove_symlinks(self.output_dir.clone())?;
+
+        Ok(())
+    }
+}
+
+fn remove_symlinks<P: AsRef<Path>>(path: P) -> Result<()> {
+    let meta = fs::symlink_metadata(path.as_ref())?;
+    if meta.is_symlink() {
+        fs::remove_file(path)?;
+    } else if meta.is_dir() {
+        for entry in fs::read_dir(path)? {
+            remove_symlinks(entry?.path())?;
+        }
+    }
+    Ok(())
+}
+
+#[derive(Debug, Deserialize)]
+#[serde(tag = "type")]
+/// Schema used to communicate target information from the test environment set
+/// up by botanist.
+///
+/// See https://cs.opensource.google/fuchsia/fuchsia/+/main:tools/botanist/README.md
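+///
+/// A minimal sketch of the expected JSON for a single Fuchsia device (all
+/// values are placeholders):
+///
+/// ```json
+/// [{
+///     "type": "FuchsiaDevice",
+///     "nodename": "fuchsia-1234-5678-9abc",
+///     "ipv4": "",
+///     "ipv6": "fe80::1%2",
+///     "ssh_key": "/path/to/ssh/private_key"
+/// }]
+/// ```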
+enum InfraTarget {
+    FuchsiaDevice {
+        nodename: String,
+        ipv4: String,
+        ipv6: String,
+        ssh_key: PathBuf,
+        pdu: Option<PduRef>,
+        test_params: Option<Value>,
+    },
+    AccessPoint {
+        ip: IpAddr,
+        ssh_key: PathBuf,
+        attenuator: Option<AttenuatorRef>,
+        pdu: Option<PduRef>,
+    },
+    IPerfServer {
+        ip: IpAddr,
+        ssh_key: PathBuf,
+        #[serde(default = "default_iperf_user")]
+        user: String,
+        test_interface: String,
+        pdu: Option<PduRef>,
+    },
+}
+
+fn default_iperf_user() -> String {
+    "pi".to_string()
+}
+
+#[derive(Clone, Debug, Deserialize)]
+struct AttenuatorRef {
+    ip: IpAddr,
+}
+
+#[cfg(test)]
+mod test {
+    use super::*;
+
+    use crate::run;
+    use crate::runner::Runner;
+    use crate::{env::Environment, runner::ExitStatus};
+
+    use std::ffi::OsStr;
+
+    use assert_matches::assert_matches;
+    use indoc::formatdoc;
+    use pretty_assertions::assert_eq;
+    use serde_json::json;
+    use tempfile::{NamedTempFile, TempDir};
+
+    const FUCHSIA_NAME: &'static str = "fuchsia-1234-5678-9abc";
+    const FUCHSIA_ADDR: &'static str = "fe80::1%2";
+
+    #[derive(Default)]
+    struct MockRunner {
+        out_dir: PathBuf,
+        config: std::cell::Cell<PathBuf>,
+    }
+    impl MockRunner {
+        fn new(out_dir: PathBuf) -> Self {
+            Self { out_dir, ..Default::default() }
+        }
+    }
+    impl Runner for MockRunner {
+        fn run(&self, config: PathBuf) -> Result<ExitStatus> {
+            self.config.set(config);
+
+            let antlion_out = self.out_dir.join(TESTBED_NAME).join("latest");
+            fs::create_dir_all(&antlion_out)
+                .context("Failed to create antlion output directory")?;
+            fs::write(antlion_out.join(TEST_SUMMARY_FILE), "")
+                .context("Failed to write test_summary.yaml")?;
+            Ok(ExitStatus::Ok)
+        }
+    }
+
+    struct MockEnvironment {
+        config: Option<PathBuf>,
+        out_dir: Option<PathBuf>,
+    }
+    impl Environment for MockEnvironment {
+        fn var<K: AsRef<OsStr>>(&self, key: K) -> Result<String, std::env::VarError> {
+            if key.as_ref() == ENV_TESTBED_CONFIG {
+                self.config
+                    .clone()
+                    .ok_or(std::env::VarError::NotPresent)
+                    .map(|p| p.into_os_string().into_string().unwrap())
+            } else if key.as_ref() == ENV_OUT_DIR {
+                self.out_dir
+                    .clone()
+                    .ok_or(std::env::VarError::NotPresent)
+                    .map(|p| p.into_os_string().into_string().unwrap())
+            } else {
+                Err(std::env::VarError::NotPresent)
+            }
+        }
+    }
+
+    #[test]
+    fn infra_not_detected() {
+        let ssh = NamedTempFile::new().unwrap();
+        let ffx = NamedTempFile::new().unwrap();
+        let env = MockEnvironment { config: None, out_dir: None };
+
+        let got = InfraDriver::new(env, ssh.path().to_path_buf(), ffx.path().to_path_buf());
+        assert_matches!(got, Err(InfraDriverError::NotDetected(_)));
+    }
+
+    #[test]
+    fn infra_not_detected_config() {
+        let ssh = NamedTempFile::new().unwrap();
+        let ffx = NamedTempFile::new().unwrap();
+        let out_dir = TempDir::new().unwrap();
+        let env = MockEnvironment { config: None, out_dir: Some(out_dir.path().to_path_buf()) };
+
+        let got = InfraDriver::new(env, ssh.path().to_path_buf(), ffx.path().to_path_buf());
+        assert_matches!(got, Err(InfraDriverError::NotDetected(v)) if v == ENV_TESTBED_CONFIG);
+    }
+
+    #[test]
+    fn infra_not_detected_out_dir() {
+        let ssh = NamedTempFile::new().unwrap();
+        let ssh_key = NamedTempFile::new().unwrap();
+        let ffx = NamedTempFile::new().unwrap();
+
+        let testbed_config = NamedTempFile::new().unwrap();
+        serde_json::to_writer_pretty(
+            testbed_config.as_file(),
+            &json!([{
+                "type": "FuchsiaDevice",
+                "nodename": FUCHSIA_NAME,
+                "ipv4": "",
+                "ipv6": FUCHSIA_ADDR,
+                "ssh_key": ssh_key.path(),
+            }]),
+        )
+        .unwrap();
+
+        let env =
+            MockEnvironment { config: Some(testbed_config.path().to_path_buf()), out_dir: None };
+
+        let got = InfraDriver::new(env, ssh.path().to_path_buf(), ffx.path().to_path_buf());
+        assert_matches!(got, Err(InfraDriverError::NotDetected(v)) if v == ENV_OUT_DIR);
+    }
+
+    #[test]
+    fn infra_invalid_config() {
+        let ssh = NamedTempFile::new().unwrap();
+        let ffx = NamedTempFile::new().unwrap();
+        let out_dir = TempDir::new().unwrap();
+
+        let testbed_config = NamedTempFile::new().unwrap();
+        serde_json::to_writer_pretty(testbed_config.as_file(), &json!({ "foo": "bar" })).unwrap();
+
+        let env = MockEnvironment {
+            config: Some(testbed_config.path().to_path_buf()),
+            out_dir: Some(out_dir.path().to_path_buf()),
+        };
+
+        let got = InfraDriver::new(env, ssh.path().to_path_buf(), ffx.path().to_path_buf());
+        assert_matches!(got, Err(_));
+    }
+
+    #[test]
+    fn infra() {
+        let ssh = NamedTempFile::new().unwrap();
+        let ssh_key = NamedTempFile::new().unwrap();
+        let ffx = NamedTempFile::new().unwrap();
+        let out_dir = TempDir::new().unwrap();
+
+        let testbed_config = NamedTempFile::new().unwrap();
+        serde_json::to_writer_pretty(
+            testbed_config.as_file(),
+            &json!([{
+                "type": "FuchsiaDevice",
+                "nodename": FUCHSIA_NAME,
+                "ipv4": "",
+                "ipv6": FUCHSIA_ADDR,
+                "ssh_key": ssh_key.path(),
+            }]),
+        )
+        .unwrap();
+
+        let runner = MockRunner::new(out_dir.path().to_path_buf());
+        let env = MockEnvironment {
+            config: Some(testbed_config.path().to_path_buf()),
+            out_dir: Some(out_dir.path().to_path_buf()),
+        };
+        let driver =
+            InfraDriver::new(env, ssh.path().to_path_buf(), ffx.path().to_path_buf()).unwrap();
+        run(runner, driver, None).unwrap();
+
+        let got = fs::read_to_string(out_dir.path().join("config.yaml")).unwrap();
+
+        let ssh_path = ssh.path().display().to_string();
+        let ssh_key_path = ssh_key.path().display().to_string();
+        let ffx_path = ffx.path().display().to_string();
+        let out_path = out_dir.path().display();
+        let want = formatdoc! {r#"
+        TestBeds:
+        - Name: {TESTBED_NAME}
+          Controllers:
+            FuchsiaDevice:
+            - mdns_name: {FUCHSIA_NAME}
+              ip: {FUCHSIA_ADDR}
+              take_bug_report_on_fail: true
+              ssh_binary_path: {ssh_path}
+              ffx_binary_path: {ffx_path}
+              ssh_priv_key: {ssh_key_path}
+              hard_reboot_on_fail: true
+        MoblyParams:
+          LogPath: {out_path}
+        "#};
+
+        assert_eq!(got, want);
+    }
+
+    #[test]
+    fn infra_with_test_params() {
+        let ssh = NamedTempFile::new().unwrap();
+        let ssh_key = NamedTempFile::new().unwrap();
+        let ffx = NamedTempFile::new().unwrap();
+        let out_dir = TempDir::new().unwrap();
+
+        let testbed_config = NamedTempFile::new().unwrap();
+        serde_json::to_writer_pretty(
+            testbed_config.as_file(),
+            &json!([{
+                "type": "FuchsiaDevice",
+                "nodename": FUCHSIA_NAME,
+                "ipv4": "",
+                "ipv6": FUCHSIA_ADDR,
+                "ssh_key": ssh_key.path(),
+                "test_params": {
+                    "sl4f_sanity_test_params": {
+                        "can_overwrite": false,
+                        "from_original": true,
+                    }
+                }
+            }]),
+        )
+        .unwrap();
+
+        let runner = MockRunner::new(out_dir.path().to_path_buf());
+        let env = MockEnvironment {
+            config: Some(testbed_config.path().to_path_buf()),
+            out_dir: Some(out_dir.path().to_path_buf()),
+        };
+        let driver =
+            InfraDriver::new(env, ssh.path().to_path_buf(), ffx.path().to_path_buf()).unwrap();
+        let params = "
+            sl4f_sanity_test_params:
+                merged_with: true
+                can_overwrite: true
+        ";
+        let params = serde_yaml::from_str(params).unwrap();
+        run(runner, driver, Some(params)).unwrap();
+
+        let got = fs::read_to_string(out_dir.path().join("config.yaml")).unwrap();
+
+        let ssh_path = ssh.path().display().to_string();
+        let ssh_key_path = ssh_key.path().display().to_string();
+        let ffx_path = ffx.path().display().to_string();
+        let out_path = out_dir.path().display();
+        let want = formatdoc! {r#"
+        TestBeds:
+        - Name: {TESTBED_NAME}
+          Controllers:
+            FuchsiaDevice:
+            - mdns_name: {FUCHSIA_NAME}
+              ip: {FUCHSIA_ADDR}
+              take_bug_report_on_fail: true
+              ssh_binary_path: {ssh_path}
+              ffx_binary_path: {ffx_path}
+              ssh_priv_key: {ssh_key_path}
+              hard_reboot_on_fail: true
+          TestParams:
+            sl4f_sanity_test_params:
+              can_overwrite: true
+              from_original: true
+              merged_with: true
+        MoblyParams:
+          LogPath: {out_path}
+        "#};
+
+        assert_eq!(got, want);
+    }
+
+    #[test]
+    fn infra_with_auxiliary_devices() {
+        const FUCHSIA_PDU_IP: &'static str = "192.168.42.14";
+        const FUCHSIA_PDU_PORT: u8 = 1;
+        const AP_IP: &'static str = "192.168.42.11";
+        const AP_AND_IPERF_PDU_IP: &'static str = "192.168.42.13";
+        const AP_PDU_PORT: u8 = 1;
+        const ATTENUATOR_IP: &'static str = "192.168.42.15";
+        const IPERF_IP: &'static str = "192.168.42.12";
+        const IPERF_USER: &'static str = "alice";
+        const IPERF_PDU_PORT: u8 = 2;
+
+        let ssh = NamedTempFile::new().unwrap();
+        let ssh_key = NamedTempFile::new().unwrap();
+        let ffx = NamedTempFile::new().unwrap();
+        let out_dir = TempDir::new().unwrap();
+
+        let testbed_config = NamedTempFile::new().unwrap();
+        serde_json::to_writer_pretty(
+            testbed_config.as_file(),
+            &json!([{
+                "type": "FuchsiaDevice",
+                "nodename": FUCHSIA_NAME,
+                "ipv4": "",
+                "ipv6": FUCHSIA_ADDR,
+                "ssh_key": ssh_key.path(),
+                "pdu": {
+                    "ip": FUCHSIA_PDU_IP,
+                    "port": FUCHSIA_PDU_PORT,
+                },
+            }, {
+                "type": "AccessPoint",
+                "ip": AP_IP,
+                "ssh_key": ssh_key.path(),
+                "attenuator": {
+                    "ip": ATTENUATOR_IP,
+                },
+                "pdu": {
+                    "ip": AP_AND_IPERF_PDU_IP,
+                    "port": AP_PDU_PORT,
+                    "device": "fancy-pdu",
+                },
+            }, {
+                "type": "IPerfServer",
+                "ip": IPERF_IP,
+                "ssh_key": ssh_key.path(),
+                "user": IPERF_USER,
+                "test_interface": "eth0",
+                "pdu": {
+                    "ip": AP_AND_IPERF_PDU_IP,
+                    "port": IPERF_PDU_PORT,
+                    "device": "fancy-pdu",
+                },
+            }]),
+        )
+        .unwrap();
+
+        let runner = MockRunner::new(out_dir.path().to_path_buf());
+        let env = MockEnvironment {
+            config: Some(testbed_config.path().to_path_buf()),
+            out_dir: Some(out_dir.path().to_path_buf()),
+        };
+        let driver =
+            InfraDriver::new(env, ssh.path().to_path_buf(), ffx.path().to_path_buf()).unwrap();
+        run(runner, driver, None).unwrap();
+
+        let got = std::fs::read_to_string(out_dir.path().join("config.yaml")).unwrap();
+
+        let ssh_path = ssh.path().display().to_string();
+        let ssh_key_path = ssh_key.path().display().to_string();
+        let ffx_path = ffx.path().display().to_string();
+        let out_path = out_dir.path().display();
+        let want = formatdoc! {r#"
+        TestBeds:
+        - Name: {TESTBED_NAME}
+          Controllers:
+            FuchsiaDevice:
+            - mdns_name: {FUCHSIA_NAME}
+              ip: {FUCHSIA_ADDR}
+              take_bug_report_on_fail: true
+              ssh_binary_path: {ssh_path}
+              ffx_binary_path: {ffx_path}
+              ssh_priv_key: {ssh_key_path}
+              PduDevice:
+                device: synaccess.np02b
+                host: {FUCHSIA_PDU_IP}
+                port: {FUCHSIA_PDU_PORT}
+              hard_reboot_on_fail: true
+            AccessPoint:
+            - wan_interface: eth0
+              ssh_config:
+                ssh_binary_path: {ssh_path}
+                host: {AP_IP}
+                user: root
+                identity_file: {ssh_key_path}
+              PduDevice:
+                device: fancy-pdu
+                host: {AP_AND_IPERF_PDU_IP}
+                port: {AP_PDU_PORT}
+              Attenuator:
+              - Address: {ATTENUATOR_IP}
+                attenuator_ports_wifi_2g:
+                - 1
+                - 2
+                - 3
+                attenuator_ports_wifi_5g:
+                - 1
+                - 2
+                - 3
+            Attenuator:
+            - Model: minicircuits
+              InstrumentCount: 4
+              Address: {ATTENUATOR_IP}
+              Protocol: http
+              Port: 80
+            PduDevice:
+            - device: fancy-pdu
+              host: {AP_AND_IPERF_PDU_IP}
+            - device: synaccess.np02b
+              host: {FUCHSIA_PDU_IP}
+            IPerfServer:
+            - ssh_config:
+                ssh_binary_path: {ssh_path}
+                host: {IPERF_IP}
+                user: {IPERF_USER}
+                identity_file: {ssh_key_path}
+              port: 5201
+              test_interface: eth0
+              use_killall: true
+        MoblyParams:
+          LogPath: {out_path}
+        "#};
+
+        assert_eq!(got, want);
+    }
+
+    #[test]
+    fn infra_duplicate_port_pdu() {
+        let pdu_ip: IpAddr = "192.168.42.13".parse().unwrap();
+        let pdu_port = 1;
+
+        let ssh = NamedTempFile::new().unwrap();
+        let ssh_key = NamedTempFile::new().unwrap();
+        let ffx = NamedTempFile::new().unwrap();
+        let out_dir = TempDir::new().unwrap();
+
+        let testbed_config = NamedTempFile::new().unwrap();
+        serde_json::to_writer_pretty(
+            testbed_config.as_file(),
+            &json!([{
+                "type": "FuchsiaDevice",
+                "nodename": "foo",
+                "ipv4": "",
+                "ipv6": "fe80::1%2",
+                "ssh_key": ssh_key.path(),
+                "pdu": {
+                    "ip": pdu_ip,
+                    "port": pdu_port,
+                },
+            }, {
+                "type": "AccessPoint",
+                "ip": "192.168.42.11",
+                "ssh_key": ssh_key.path(),
+                "pdu": {
+                    "ip": pdu_ip,
+                    "port": pdu_port,
+                },
+            }]),
+        )
+        .unwrap();
+
+        let env = MockEnvironment {
+            config: Some(testbed_config.path().to_path_buf()),
+            out_dir: Some(out_dir.path().to_path_buf()),
+        };
+        let got = InfraDriver::new(env, ssh.path().to_path_buf(), ffx.path().to_path_buf());
+        assert_matches!(got,
+            Err(InfraDriverError::Config(ConfigError::DuplicatePort { ip, port }))
+                if ip == pdu_ip && port == pdu_port
+        );
+    }
+
+    #[test]
+    fn infra_duplicate_ip_pdu() {
+        let duplicate_ip: IpAddr = "192.168.42.13".parse().unwrap();
+
+        let ssh = NamedTempFile::new().unwrap();
+        let ssh_key = NamedTempFile::new().unwrap();
+        let ffx = NamedTempFile::new().unwrap();
+        let out_dir = TempDir::new().unwrap();
+
+        let testbed_config = NamedTempFile::new().unwrap();
+        serde_json::to_writer_pretty(
+            testbed_config.as_file(),
+            &json!([{
+                "type": "FuchsiaDevice",
+                "nodename": "foo",
+                "ipv4": "",
+                "ipv6": "fe80::1%2",
+                "ssh_key": ssh_key.path(),
+                "pdu": {
+                    "ip": duplicate_ip,
+                    "port": 1,
+                    "device": "A",
+                },
+            }, {
+                "type": "AccessPoint",
+                "ip": "192.168.42.11",
+                "ssh_key": ssh_key.path(),
+                "pdu": {
+                    "ip": duplicate_ip,
+                    "port": 2,
+                    "device": "B",
+                },
+            }]),
+        )
+        .unwrap();
+
+        let env = MockEnvironment {
+            config: Some(testbed_config.path().to_path_buf()),
+            out_dir: Some(out_dir.path().to_path_buf()),
+        };
+        assert_matches!(
+            InfraDriver::new(env, ssh.path().to_path_buf(), ffx.path().to_path_buf()),
+            Err(InfraDriverError::Config(ConfigError::DuplicateIp { ip }))
+                if ip == duplicate_ip
+        );
+    }
+
+    #[test]
+    fn infra_duplicate_ip_devices() {
+        let duplicate_ip: IpAddr = "192.168.42.11".parse().unwrap();
+
+        let ssh = NamedTempFile::new().unwrap();
+        let ssh_key = NamedTempFile::new().unwrap();
+        let ffx = NamedTempFile::new().unwrap();
+        let out_dir = TempDir::new().unwrap();
+
+        let testbed_config = NamedTempFile::new().unwrap();
+        serde_json::to_writer_pretty(
+            testbed_config.as_file(),
+            &json!([{
+                "type": "FuchsiaDevice",
+                "nodename": "foo",
+                "ipv4": duplicate_ip,
+                "ipv6": "",
+                "ssh_key": ssh_key.path(),
+            }, {
+                "type": "AccessPoint",
+                "ip": duplicate_ip,
+                "ssh_key": ssh_key.path(),
+            }]),
+        )
+        .unwrap();
+
+        let env = MockEnvironment {
+            config: Some(testbed_config.path().to_path_buf()),
+            out_dir: Some(out_dir.path().to_path_buf()),
+        };
+        let got = InfraDriver::new(env, ssh.path().to_path_buf(), ffx.path().to_path_buf());
+        assert_matches!(got,
+            Err(InfraDriverError::Config(ConfigError::DuplicateIp { ip }))
+                if ip == duplicate_ip
+        );
+    }
+
+    #[test]
+    fn remove_symlinks_works() {
+        const SYMLINK_FILE: &'static str = "latest";
+
+        let out_dir = TempDir::new().unwrap();
+        let test_file = NamedTempFile::new_in(&out_dir).unwrap();
+        let symlink_path = out_dir.path().join(SYMLINK_FILE);
+
+        #[cfg(unix)]
+        std::os::unix::fs::symlink(&test_file, &symlink_path).unwrap();
+        #[cfg(windows)]
+        std::os::windows::fs::symlink_file(&test_file, &symlink_path).unwrap();
+
+        assert_matches!(remove_symlinks(out_dir.path()), Ok(()));
+        assert_matches!(fs::symlink_metadata(symlink_path), Err(e) if e.kind() == std::io::ErrorKind::NotFound);
+        assert_matches!(fs::symlink_metadata(test_file), Ok(meta) if meta.is_file());
+    }
+}
diff --git a/runner/src/driver/local.rs b/runner/src/driver/local.rs
new file mode 100644
index 0000000..983a6a7
--- /dev/null
+++ b/runner/src/driver/local.rs
@@ -0,0 +1,287 @@
+// Copyright 2023 The Fuchsia Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+use crate::config;
+use crate::driver::Driver;
+use crate::finder::{Answer, Finder};
+use crate::net::IpAddr;
+
+use std::path::{Path, PathBuf};
+
+use anyhow::{ensure, Context, Result};
+use home::home_dir;
+
+const TESTBED_NAME: &'static str = "antlion-runner";
+
+/// Driver for running antlion locally on an emulated or hardware testbed with
+/// optional mDNS discovery when a DHCP server is not available. This is useful
+/// for testing changes locally in a development environment.
+pub(crate) struct LocalDriver {
+    target: LocalTarget,
+    output_dir: PathBuf,
+    ssh_binary: PathBuf,
+    ffx_binary: PathBuf,
+}
+
+impl LocalDriver {
+    pub fn new<F>(
+        device: Option<String>,
+        ssh_binary: PathBuf,
+        ssh_key: Option<PathBuf>,
+        ffx_binary: PathBuf,
+        out_dir: Option<PathBuf>,
+    ) -> Result<Self>
+    where
+        F: Finder,
+    {
+        let output_dir = match out_dir {
+            Some(p) => Ok(p),
+            None => std::env::current_dir().context("Failed to get current working directory"),
+        }?;
+        Ok(Self {
+            target: LocalTarget::new::<F>(device, ssh_key)?,
+            output_dir,
+            ssh_binary,
+            ffx_binary,
+        })
+    }
+}
+
+impl Driver for LocalDriver {
+    fn output_path(&self) -> &Path {
+        self.output_dir.as_path()
+    }
+    fn config(&self) -> config::Config {
+        config::Config {
+            testbeds: vec![config::Testbed {
+                name: TESTBED_NAME.to_string(),
+                controllers: config::Controllers {
+                    fuchsia_devices: vec![config::Fuchsia {
+                        mdns_name: self.target.name.clone(),
+                        ip: self.target.ip.clone(),
+                        take_bug_report_on_fail: true,
+                        ssh_binary_path: self.ssh_binary.clone(),
+                        // TODO(http://b/244747218): Remove when ssh_config is refactored away
+                        ssh_config: None,
+                        ffx_binary_path: self.ffx_binary.clone(),
+                        ssh_priv_key: self.target.ssh_key.clone(),
+                        pdu_device: None,
+                        hard_reboot_on_fail: true,
+                    }],
+                    ..Default::default()
+                },
+                test_params: None,
+            }],
+            mobly_params: config::MoblyParams { log_path: self.output_dir.clone() },
+        }
+    }
+    fn teardown(&self) -> Result<()> {
+        println!(
+            "\nView full antlion logs at {}",
+            self.output_dir.join(TESTBED_NAME).join("latest").display()
+        );
+        Ok(())
+    }
+}
+
+/// LocalTarget performs best-effort discovery of target information from
+/// standard Fuchsia environment variables.
+struct LocalTarget {
+    name: String,
+    ip: IpAddr,
+    ssh_key: PathBuf,
+}
+
+impl LocalTarget {
+    fn new<F>(device: Option<String>, ssh_key: Option<PathBuf>) -> Result<Self>
+    where
+        F: Finder,
+    {
+        let device_name = device.or_else(|| match std::env::var("FUCHSIA_DIR") {
+            Ok(dir) => match std::fs::read_to_string(format!("{dir}/out/default.device")) {
+                Ok(name) => Some(name.trim().to_string()),
+                Err(_) => {
+                    println!("A default device using \"fx set-device\" has not been set");
+                    println!("Using the first Fuchsia device discovered via mDNS");
+                    None
+                }
+            },
+            Err(_) => {
+                println!("Neither --device nor FUCHSIA_DIR has been set");
+                println!("Using the first Fuchsia device discovered via mDNS");
+                None
+            }
+        });
+
+        let Answer { name, ip } = F::find_device(device_name)?;
+
+        // TODO: Move this validation out to Args
+        let ssh_key = ssh_key
+            .or_else(|| home_dir().map(|p| p.join(".ssh/fuchsia_ed25519")))
+            .context("Failed to detect the private Fuchsia SSH key")?;
+
+        ensure!(
+            ssh_key.try_exists().with_context(|| format!(
+                "Failed to check existence of SSH key \"{}\"",
+                ssh_key.display()
+            ))?,
+            "Cannot find SSH key \"{}\"",
+            ssh_key.display()
+        );
+
+        Ok(LocalTarget { name, ip, ssh_key })
+    }
+}
+
+#[cfg(test)]
+mod test {
+    use super::*;
+
+    use crate::finder::{Answer, Finder};
+    use crate::run;
+    use crate::runner::{ExitStatus, Runner};
+
+    use indoc::formatdoc;
+    use pretty_assertions::assert_eq;
+    use tempfile::{NamedTempFile, TempDir};
+
+    const FUCHSIA_NAME: &'static str = "fuchsia-1234-5678-9abc";
+    const FUCHSIA_ADDR: &'static str = "fe80::1%2";
+    const FUCHSIA_IP: &'static str = "fe80::1";
+    const SCOPE_ID: u32 = 2;
+
+    struct MockFinder;
+    impl Finder for MockFinder {
+        fn find_device(_: Option<String>) -> Result<Answer> {
+            Ok(Answer {
+                name: FUCHSIA_NAME.to_string(),
+                ip: IpAddr::V6(FUCHSIA_IP.parse().unwrap(), Some(SCOPE_ID)),
+            })
+        }
+    }
+
+    #[derive(Default)]
+    struct MockRunner {
+        config: std::cell::Cell<PathBuf>,
+    }
+    impl Runner for MockRunner {
+        fn run(&self, config: PathBuf) -> Result<ExitStatus> {
+            self.config.set(config);
+            Ok(ExitStatus::Ok)
+        }
+    }
+
+    #[test]
+    fn local_invalid_ssh_key() {
+        let ssh = NamedTempFile::new().unwrap();
+        let ffx = NamedTempFile::new().unwrap();
+        let out_dir = TempDir::new().unwrap();
+
+        assert!(LocalDriver::new::<MockFinder>(
+            None,
+            ssh.path().to_path_buf(),
+            Some(PathBuf::new()),
+            ffx.path().to_path_buf(),
+            Some(out_dir.path().to_path_buf()),
+        )
+        .is_err());
+    }
+
+    #[test]
+    fn local() {
+        let ssh = NamedTempFile::new().unwrap();
+        let ssh_key = NamedTempFile::new().unwrap();
+        let ffx = NamedTempFile::new().unwrap();
+        let out_dir = TempDir::new().unwrap();
+
+        let runner = MockRunner::default();
+        let driver = LocalDriver::new::<MockFinder>(
+            None,
+            ssh.path().to_path_buf(),
+            Some(ssh_key.path().to_path_buf()),
+            ffx.path().to_path_buf(),
+            Some(out_dir.path().to_path_buf()),
+        )
+        .unwrap();
+
+        run(runner, driver, None).unwrap();
+
+        let got = std::fs::read_to_string(out_dir.path().join("config.yaml")).unwrap();
+
+        let ssh_path = ssh.path().display();
+        let ssh_key_path = ssh_key.path().display();
+        let ffx_path = ffx.path().display();
+        let out_path = out_dir.path().display();
+        let want = formatdoc! {r#"
+        TestBeds:
+        - Name: {TESTBED_NAME}
+          Controllers:
+            FuchsiaDevice:
+            - mdns_name: {FUCHSIA_NAME}
+              ip: {FUCHSIA_ADDR}
+              take_bug_report_on_fail: true
+              ssh_binary_path: {ssh_path}
+              ffx_binary_path: {ffx_path}
+              ssh_priv_key: {ssh_key_path}
+              hard_reboot_on_fail: true
+        MoblyParams:
+          LogPath: {out_path}
+        "#};
+
+        assert_eq!(got, want);
+    }
+
+    #[test]
+    fn local_with_test_params() {
+        let ssh = NamedTempFile::new().unwrap();
+        let ssh_key = NamedTempFile::new().unwrap();
+        let ffx = NamedTempFile::new().unwrap();
+        let out_dir = TempDir::new().unwrap();
+
+        let runner = MockRunner::default();
+        let driver = LocalDriver::new::<MockFinder>(
+            None,
+            ssh.path().to_path_buf(),
+            Some(ssh_key.path().to_path_buf()),
+            ffx.path().to_path_buf(),
+            Some(out_dir.path().to_path_buf()),
+        )
+        .unwrap();
+
+        let params_yaml = "
+        sl4f_sanity_test_params:
+            foo: bar
+        ";
+        let params = serde_yaml::from_str(params_yaml).unwrap();
+
+        run(runner, driver, Some(params)).unwrap();
+
+        let got = std::fs::read_to_string(out_dir.path().join("config.yaml")).unwrap();
+
+        let ssh_path = ssh.path().display().to_string();
+        let ssh_key_path = ssh_key.path().display().to_string();
+        let ffx_path = ffx.path().display().to_string();
+        let out_path = out_dir.path().display();
+        let want = formatdoc! {r#"
+        TestBeds:
+        - Name: {TESTBED_NAME}
+          Controllers:
+            FuchsiaDevice:
+            - mdns_name: {FUCHSIA_NAME}
+              ip: {FUCHSIA_ADDR}
+              take_bug_report_on_fail: true
+              ssh_binary_path: {ssh_path}
+              ffx_binary_path: {ffx_path}
+              ssh_priv_key: {ssh_key_path}
+              hard_reboot_on_fail: true
+          TestParams:
+            sl4f_sanity_test_params:
+              foo: bar
+        MoblyParams:
+          LogPath: {out_path}
+        "#};
+
+        assert_eq!(got, want);
+    }
+}
diff --git a/runner/src/driver/mod.rs b/runner/src/driver/mod.rs
new file mode 100644
index 0000000..35de41f
--- /dev/null
+++ b/runner/src/driver/mod.rs
@@ -0,0 +1,24 @@
+// Copyright 2023 The Fuchsia Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+pub(crate) mod infra;
+pub(crate) mod local;
+
+use crate::config::Config;
+
+use std::path::Path;
+
+use anyhow::Result;
+
+/// A Driver provides the environment-specific information needed to run an
+/// antlion test.
+pub(crate) trait Driver {
+    /// Path to output directory for test artifacts.
+    fn output_path(&self) -> &Path;
+    /// Antlion config for use during test.
+    fn config(&self) -> Config;
+    /// Additional logic to run after all tests run, regardless of tests passing
+    /// or failing.
+    fn teardown(&self) -> Result<()>;
+}
diff --git a/runner/src/env.rs b/runner/src/env.rs
new file mode 100644
index 0000000..ede8b74
--- /dev/null
+++ b/runner/src/env.rs
@@ -0,0 +1,25 @@
+// Copyright 2023 The Fuchsia Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+use std::env::VarError;
+use std::ffi::OsStr;
+
+/// Inspection of the process's environment.
+pub(crate) trait Environment {
+    /// Fetches the environment variable `key` from the current process.
+    ///
+    /// See [std::env::var] for details.
+    ///
+    /// [std::env::var]: https://doc.rust-lang.org/std/env/fn.var.html
+    fn var<K: AsRef<OsStr>>(&self, key: K) -> Result<String, VarError>;
+}
+
+/// Query the local process's environment.
+pub(crate) struct LocalEnvironment;
+
+impl Environment for LocalEnvironment {
+    fn var<K: AsRef<OsStr>>(&self, key: K) -> Result<String, VarError> {
+        std::env::var(key)
+    }
+}
diff --git a/runner/src/finder.rs b/runner/src/finder.rs
new file mode 100644
index 0000000..c381b36
--- /dev/null
+++ b/runner/src/finder.rs
@@ -0,0 +1,200 @@
+// Copyright 2023 The Fuchsia Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+use crate::net::IpAddr;
+
+use std::io;
+use std::net::{Ipv6Addr, SocketAddr, SocketAddrV6, UdpSocket};
+use std::str;
+use std::time::{Duration, Instant};
+
+use anyhow::{bail, Context, Result};
+use mdns::protocol as dns;
+use netext::{get_mcast_interfaces, IsLocalAddr, McastInterface};
+use packet::{InnerPacketBuilder, ParseBuffer};
+use socket2::{Domain, Protocol, Socket, Type};
+
+const FUCHSIA_DOMAIN: &str = "_fuchsia._udp.local";
+const MDNS_MCAST_V6: Ipv6Addr = Ipv6Addr::new(0xff02, 0, 0, 0, 0, 0, 0, 0x00fb);
+const MDNS_PORT: u16 = 5353;
+const MDNS_TIMEOUT: Duration = Duration::from_secs(10);
+
+lazy_static::lazy_static! {
+    static ref MDNS_QUERY: &'static [u8] = construct_query_buf(FUCHSIA_DOMAIN);
+}
+
+/// Find Fuchsia devices.
+pub(crate) trait Finder {
+    /// Find a Fuchsia device, preferring `device_name` if specified.
+    fn find_device(device_name: Option<String>) -> Result<Answer>;
+}
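+
+// A minimal Finder can simply return a fixed answer, as the local driver's
+// tests do with `MockFinder`. A sketch (the name `StaticFinder` is
+// hypothetical, not part of this change):
+//
+//     struct StaticFinder;
+//     impl Finder for StaticFinder {
+//         fn find_device(_: Option<String>) -> Result<Answer> {
+//             Ok(Answer { name: "fuchsia-1234".to_string(), ip: "fe80::1%2".parse()? })
+//         }
+//     }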
+
+/// Answer from a Finder.
+pub(crate) struct Answer {
+    /// Name of the Fuchsia device.
+    pub name: String,
+    /// IP address of the Fuchsia device.
+    pub ip: IpAddr,
+}
+
+pub(crate) struct MulticastDns {}
+
+impl Finder for MulticastDns {
+    /// Find a Fuchsia device using mDNS. If `device_name` is not specified, the
+    /// first device will be used.
+    fn find_device(device_name: Option<String>) -> Result<Answer> {
+        let interfaces =
+            get_mcast_interfaces().context("Failed to list multicast-enabled interfaces")?;
+        let interface_names =
+            interfaces.iter().map(|i| i.name.clone()).collect::<Vec<String>>().join(", ");
+        if let Some(ref d) = device_name {
+            println!("Performing mDNS discovery for {d} on interfaces: {interface_names}");
+        } else {
+            println!("Performing mDNS discovery on interfaces: {interface_names}");
+        }
+
+        let socket = create_socket(interfaces.iter()).context("Failed to create mDNS socket")?;
+
+        // TODO(http://b/264936590): Remove the race condition where the Fuchsia
+        // device can send its answer before this socket starts listening. Add an
+        // async runtime and concurrently listen for answers while sending queries.
+        send_queries(&socket, interfaces.iter()).context("Failed to send mDNS queries")?;
+        let answer = listen_for_answers(socket, device_name)?;
+
+        println!("Device {} found at {}", answer.name, answer.ip);
+        Ok(answer)
+    }
+}
+
+fn construct_query_buf(service: &str) -> &'static [u8] {
+    let question = dns::QuestionBuilder::new(
+        dns::DomainBuilder::from_str(service).unwrap(),
+        dns::Type::Ptr,
+        dns::Class::In,
+        true,
+    );
+
+    let mut message = dns::MessageBuilder::new(0, true);
+    message.add_question(question);
+
+    let mut buf = vec![0; message.bytes_len()];
+    message.serialize(buf.as_mut_slice());
+    Box::leak(buf.into_boxed_slice())
+}
+
+/// Create a socket for both sending and listening on all multicast-capable
+/// interfaces.
+fn create_socket<'a>(interfaces: impl Iterator<Item = &'a McastInterface>) -> Result<Socket> {
+    let socket = Socket::new(Domain::IPV6, Type::DGRAM, Some(Protocol::UDP))?;
+    let read_timeout = Duration::from_millis(100);
+    socket
+        .set_read_timeout(Some(read_timeout))
+        .with_context(|| format!("Failed to set SO_RCVTIMEO to {}ms", read_timeout.as_millis()))?;
+    socket.set_only_v6(true).context("Failed to set IPV6_V6ONLY")?;
+    socket.set_reuse_address(true).context("Failed to set SO_REUSEADDR")?;
+    socket.set_reuse_port(true).context("Failed to set SO_REUSEPORT")?;
+
+    for interface in interfaces {
+        // Listen on all multicast-enabled interfaces
+        match interface.id() {
+            Ok(id) => match socket.join_multicast_v6(&MDNS_MCAST_V6, id) {
+                Ok(()) => {}
+                Err(e) => eprintln!("Failed to join mDNS multicast group on interface {id}: {e}"),
+            },
+            Err(e) => eprintln!("Failed to listen on interface {}: {}", interface.name, e),
+        }
+    }
+
+    socket
+        .bind(&SocketAddrV6::new(Ipv6Addr::UNSPECIFIED, 0, 0, 0).into())
+        .context("Failed to bind to the unspecified IPv6 address")?;
+
+    Ok(socket)
+}
+
+fn send_queries<'a>(
+    socket: &Socket,
+    interfaces: impl Iterator<Item = &'a McastInterface>,
+) -> Result<()> {
+    let to_addr = SocketAddrV6::new(MDNS_MCAST_V6, MDNS_PORT, 0, 0).into();
+
+    for interface in interfaces {
+        let id = interface
+            .id()
+            .with_context(|| format!("Failed to get interface ID for {}", interface.name))?;
+        socket
+            .set_multicast_if_v6(id)
+            .with_context(|| format!("Failed to set multicast interface for {}", interface.name))?;
+        for addr in &interface.addrs {
+            if let SocketAddr::V6(addr_v6) = addr {
+                if !addr.ip().is_local_addr() || addr.ip().is_loopback() {
+                    continue;
+                }
+                if let Err(e) = socket.send_to(&MDNS_QUERY, &to_addr) {
+                    eprintln!(
+                        "Failed to send mDNS query out {} via {}: {e}",
+                        interface.name,
+                        addr_v6.ip()
+                    );
+                    continue;
+                }
+            }
+        }
+    }
+    Ok(())
+}
+
+fn listen_for_answers(socket: Socket, device_name: Option<String>) -> Result<Answer> {
+    let s: UdpSocket = socket.into();
+    let mut buf = [0; 1500];
+
+    let end = Instant::now() + MDNS_TIMEOUT;
+    while Instant::now() < end {
+        match s.recv_from(&mut buf) {
+            Ok((packet_bytes, src_sock_addr)) => {
+                if !src_sock_addr.ip().is_local_addr() {
+                    continue;
+                }
+
+                let mut packet_buf = &mut buf[..packet_bytes];
+                match packet_buf.parse::<dns::Message<_>>() {
+                    Ok(message) => {
+                        if !message.answers.iter().any(|a| a.domain == FUCHSIA_DOMAIN) {
+                            continue;
+                        }
+                        for answer in message.additional {
+                            if let Some(std::net::IpAddr::V6(addr)) = answer.rdata.ip_addr() {
+                                if let SocketAddr::V6(src_v6) = src_sock_addr {
+                                    let name = answer
+                                        .domain
+                                        .to_string()
+                                        .trim_end_matches(".local")
+                                        .to_string();
+                                    let scope_id = src_v6.scope_id();
+
+                                    if let Some(ref device) = device_name {
+                                        if &name != device {
+                                            println!("Found irrelevant device {name} at {addr}%{scope_id}");
+                                            continue;
+                                        }
+                                    }
+
+                                    return Ok(Answer {
+                                        name,
+                                        ip: IpAddr::V6(addr, Some(scope_id)),
+                                    });
+                                }
+                            }
+                        }
+                    }
+                    Err(err) => eprintln!("Failed to parse mDNS packet: {err:?}"),
+                }
+            }
+            Err(err) if err.kind() == io::ErrorKind::WouldBlock => {}
+            Err(err) => return Err(err.into()),
+        }
+    }
+
+    bail!("device {device_name:?} not found")
+}
diff --git a/runner/src/main.rs b/runner/src/main.rs
new file mode 100644
index 0000000..4252694
--- /dev/null
+++ b/runner/src/main.rs
@@ -0,0 +1,149 @@
+// Copyright 2023 The Fuchsia Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+mod config;
+mod driver;
+mod env;
+mod finder;
+mod net;
+mod runner;
+mod yaml;
+
+use crate::driver::infra::{InfraDriver, InfraDriverError};
+use crate::runner::ExitStatus;
+
+use std::fs::File;
+use std::path::PathBuf;
+use std::{fs, process::ExitCode};
+
+use anyhow::{Context, Result};
+use argh::FromArgs;
+use serde_yaml;
+use serde_yaml::Value;
+
+#[derive(FromArgs)]
+/// antlion runner with config generation
+struct Args {
+    /// name of the Fuchsia device to use for testing; defaults to using mDNS
+    /// discovery
+    #[argh(option)]
+    device: Option<String>,
+
+    /// path to the SSH binary used to communicate with all devices
+    #[argh(option, from_str_fn(parse_file))]
+    ssh_binary: PathBuf,
+
+    /// path to the SSH private key used to communicate with Fuchsia; defaults
+    /// to ~/.ssh/fuchsia_ed25519
+    #[argh(option, from_str_fn(parse_file))]
+    ssh_key: Option<PathBuf>,
+
+    /// path to the FFX binary used to communicate with Fuchsia
+    #[argh(option, from_str_fn(parse_file))]
+    ffx_binary: PathBuf,
+
+    /// path to the python interpreter binary (e.g. /bin/python3.9)
+    #[argh(option)]
+    python_bin: String,
+
+    /// path to the antlion zipapp, ending in .pyz
+    #[argh(option, from_str_fn(parse_file))]
+    antlion_pyz: PathBuf,
+
+    /// path to a directory for outputting artifacts; defaults to the current
+    /// working directory or FUCHSIA_TEST_OUTDIR
+    #[argh(option, from_str_fn(parse_directory))]
+    out_dir: Option<PathBuf>,
+
+    /// path to additional YAML config for this test; placed in the
+    /// "test_params" key in the antlion config
+    #[argh(option, from_str_fn(parse_file))]
+    test_params: Option<PathBuf>,
+}
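+
+// An illustrative invocation (binary name and paths are hypothetical, not part
+// of this change):
+//
+//     antlion-runner \
+//         --ssh-binary /usr/bin/ssh \
+//         --ffx-binary ./ffx \
+//         --python-bin /bin/python3.9 \
+//         --antlion-pyz ./antlion.pyz \
+//         --test-params ./test_params.yaml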
+
+fn parse_file(s: &str) -> Result<PathBuf, String> {
+    let path = PathBuf::from(s);
+    let _ = File::open(&path).map_err(|e| format!("Failed to open \"{s}\": {e}"))?;
+    Ok(path)
+}
+
+fn parse_directory(s: &str) -> Result<PathBuf, String> {
+    let path = PathBuf::from(s);
+    let meta =
+        std::fs::metadata(&path).map_err(|e| format!("Failed to read metadata of \"{s}\": {e}"))?;
+    if meta.is_file() {
+        return Err(format!("Expected a directory but found a file at \"{s}\""));
+    }
+    Ok(path)
+}
+
+fn run<R, D>(runner: R, driver: D, test_params: Option<Value>) -> Result<ExitCode>
+where
+    R: runner::Runner,
+    D: driver::Driver,
+{
+    let mut config = driver.config();
+    if let Some(params) = test_params {
+        config.merge_test_params(params);
+    }
+
+    let yaml =
+        serde_yaml::to_string(&config).context("Failed to convert antlion config to YAML")?;
+
+    let output_path = driver.output_path().to_path_buf();
+    let config_path = output_path.join("config.yaml");
+    println!("Writing {}", config_path.display());
+    println!("\n{yaml}\n");
+    fs::write(&config_path, yaml).context("Failed to write config to file")?;
+
+    let exit_code = runner.run(config_path).context("Failed to run antlion")?;
+    match exit_code {
+        ExitStatus::Ok => println!("Antlion successfully exited"),
+        ExitStatus::Err(code) => eprintln!("Antlion failed with status code {}", code),
+        ExitStatus::Interrupt(Some(code)) => eprintln!("Antlion interrupted by signal {}", code),
+        ExitStatus::Interrupt(None) => eprintln!("Antlion interrupted by signal"),
+    };
+    driver.teardown().context("Failed to teardown environment")?;
+    Ok(exit_code.into())
+}
+
+fn main() -> Result<ExitCode> {
+    let args: Args = argh::from_env();
+    let env = env::LocalEnvironment;
+    let runner =
+        runner::ProcessRunner { python_bin: args.python_bin, antlion_pyz: args.antlion_pyz };
+
+    let test_params = match args.test_params {
+        Some(path) => {
+            let text = fs::read_to_string(&path)
+                .with_context(|| format!("Failed to read file \"{}\"", path.display()))?;
+            let yaml = serde_yaml::from_str(&text)
+                .with_context(|| format!("Failed to parse \"{text}\" as YAML"))?;
+            Some(yaml)
+        }
+        None => None,
+    };
+
+    match InfraDriver::new(env, args.ssh_binary.clone(), args.ffx_binary.clone()) {
+        Ok(env) => return run(runner, env, test_params),
+        Err(InfraDriverError::NotDetected(_)) => {}
+        Err(InfraDriverError::Config(e)) => {
+            return Err(anyhow::Error::from(e).context("Config validation"))
+        }
+        Err(InfraDriverError::Other(e)) => {
+            return Err(anyhow::Error::from(e).context("Unexpected infra driver error"))
+        }
+    };
+
+    let env = driver::local::LocalDriver::new::<finder::MulticastDns>(
+        args.device.clone(),
+        args.ssh_binary.clone(),
+        args.ssh_key.clone(),
+        args.ffx_binary.clone(),
+        args.out_dir.clone(),
+    )
+    .context("Failed to detect local environment")?;
+
+    run(runner, env, test_params)
+}
diff --git a/runner/src/net.rs b/runner/src/net.rs
new file mode 100644
index 0000000..70db2eb
--- /dev/null
+++ b/runner/src/net.rs
@@ -0,0 +1,231 @@
+// Copyright 2023 The Fuchsia Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+use std::fmt::{Debug, Display};
+use std::marker::PhantomData;
+use std::net::{Ipv4Addr, Ipv6Addr};
+
+use netext::IsLocalAddr;
+use nix::net::if_::if_nametoindex;
+use serde::{Deserialize, Serialize};
+use thiserror::Error;
+
+/// IP address with support for IPv6 scope identifiers as defined in RFC 4007.
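+///
+/// For example (illustrative values): `"192.168.1.1"` parses to `IpAddr::V4`,
+/// `"fe80::1"` parses to `IpAddr::V6` without a scope, and `"fe80::1%2"`
+/// parses to `IpAddr::V6` with scope ID 2. A scope may also be given as an
+/// interface name (e.g. `"fe80::1%eth0"`) if that interface exists.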
+#[derive(Copy, Clone, Eq, PartialEq, Hash, PartialOrd, Ord)]
+pub enum IpAddr {
+    /// An IPv4 address.
+    V4(Ipv4Addr),
+    /// An IPv6 address with optional scope identifier.
+    V6(Ipv6Addr, Option<u32>),
+}
+
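+// Note: converting to `std::net::IpAddr` discards any IPv6 scope ID, since the
+// standard library address type has no scope field.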
+impl Into<std::net::IpAddr> for IpAddr {
+    fn into(self) -> std::net::IpAddr {
+        match self {
+            IpAddr::V4(ip) => std::net::IpAddr::from(ip),
+            IpAddr::V6(ip, _) => std::net::IpAddr::from(ip),
+        }
+    }
+}
+
+impl From<Ipv6Addr> for IpAddr {
+    fn from(value: Ipv6Addr) -> Self {
+        IpAddr::V6(value, None)
+    }
+}
+
+impl From<Ipv4Addr> for IpAddr {
+    fn from(value: Ipv4Addr) -> Self {
+        IpAddr::V4(value)
+    }
+}
+
+impl From<std::net::IpAddr> for IpAddr {
+    fn from(value: std::net::IpAddr) -> Self {
+        match value {
+            std::net::IpAddr::V4(ip) => IpAddr::from(ip),
+            std::net::IpAddr::V6(ip) => IpAddr::from(ip),
+        }
+    }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Error)]
+/// An error which can be returned when parsing an IP address with optional IPv6
+/// scope ID. See [`std::net::AddrParseError`].
+pub enum AddrParseError {
+    #[error(transparent)]
+    IpInvalid(#[from] std::net::AddrParseError),
+    #[error("no interface found with name \"{0}\"")]
+    InterfaceNotFound(String),
+    #[error("only IPv6 link-local may include a scope ID")]
+    /// Scope IDs are only supported for IPv6 link-local addresses as per RFC
+    /// 6874 Section 4.
+    ScopeNotSupported,
+}
+
+impl std::str::FromStr for IpAddr {
+    type Err = AddrParseError;
+
+    fn from_str(s: &str) -> Result<Self, Self::Err> {
+        let mut parts = s.splitn(2, '%');
+        let addr = parts.next().unwrap(); // first element is guaranteed
+        let ip = std::net::IpAddr::from_str(addr)?;
+        let scope = parts.next();
+        match (ip, scope) {
+            (std::net::IpAddr::V4(ip), None) => Ok(IpAddr::from(ip)),
+            (std::net::IpAddr::V4(_), Some(_)) => Err(AddrParseError::ScopeNotSupported),
+            (std::net::IpAddr::V6(ip), None) => Ok(IpAddr::V6(ip, None)),
+            (std::net::IpAddr::V6(ip), Some(scope)) => {
+                if !ip.is_link_local_addr() {
+                    return Err(AddrParseError::ScopeNotSupported);
+                }
+                if let Ok(index) = scope.parse::<u32>() {
+                    return Ok(IpAddr::V6(ip, Some(index)));
+                }
+                match if_nametoindex(scope) {
+                    Ok(index) => Ok(IpAddr::V6(ip, Some(index))),
+                    Err(_) => Err(AddrParseError::InterfaceNotFound(scope.to_string())),
+                }
+            }
+        }
+    }
+}
+
+impl Display for IpAddr {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        match self {
+            IpAddr::V4(ip) => Display::fmt(ip, f),
+            IpAddr::V6(ip, None) => Display::fmt(ip, f),
+            IpAddr::V6(ip, Some(scope)) => {
+                Display::fmt(ip, f)?;
+                write!(f, "%{}", scope)
+            }
+        }
+    }
+}
+
+impl Debug for IpAddr {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        Display::fmt(self, f)
+    }
+}
+
+impl Serialize for IpAddr {
+    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+    where
+        S: serde::Serializer,
+    {
+        serializer.serialize_str(self.to_string().as_str())
+    }
+}
+
+impl<'de> Deserialize<'de> for IpAddr {
+    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+    where
+        D: serde::Deserializer<'de>,
+    {
+        deserializer.deserialize_str(FromStrVisitor::new())
+    }
+}
+
+struct FromStrVisitor<T> {
+    ty: PhantomData<T>,
+}
+
+impl<T> FromStrVisitor<T> {
+    fn new() -> Self {
+        FromStrVisitor { ty: PhantomData }
+    }
+}
+
+impl<'de, T> serde::de::Visitor<'de> for FromStrVisitor<T>
+where
+    T: std::str::FromStr,
+    T::Err: std::fmt::Display,
+{
+    type Value = T;
+
+    fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        formatter.write_str("IP address")
+    }
+
+    fn visit_str<E>(self, s: &str) -> Result<Self::Value, E>
+    where
+        E: serde::de::Error,
+    {
+        s.parse().map_err(serde::de::Error::custom)
+    }
+}
+
+#[cfg(test)]
+mod test {
+    use super::{AddrParseError, IpAddr};
+    use assert_matches::assert_matches;
+
+    #[test]
+    fn parse_ip_invalid() {
+        assert_matches!("".parse::<IpAddr>(), Err(AddrParseError::IpInvalid(_)));
+        assert_matches!("192.168.1.".parse::<IpAddr>(), Err(AddrParseError::IpInvalid(_)));
+        assert_matches!("fe80:".parse::<IpAddr>(), Err(AddrParseError::IpInvalid(_)));
+    }
+
+    #[test]
+    fn parse_ipv4() {
+        assert_matches!(
+            "192.168.1.1".parse::<IpAddr>(),
+            Ok(IpAddr::V4(ip))
+                if ip == "192.168.1.1".parse::<std::net::Ipv4Addr>().unwrap()
+        );
+    }
+
+    #[test]
+    fn parse_ipv4_with_scope() {
+        assert_matches!(
+            "192.168.1.1%1".parse::<IpAddr>(),
+            Err(AddrParseError::ScopeNotSupported)
+        );
+    }
+
+    #[test]
+    fn parse_ipv6() {
+        assert_matches!(
+            "fe80::1".parse::<IpAddr>(),
+            Ok(IpAddr::V6(ip, None))
+                if ip == "fe80::1".parse::<std::net::Ipv6Addr>().unwrap()
+        );
+    }
+
+    #[test]
+    fn parse_ipv6_global_with_scope() {
+        assert_matches!("2001::1%1".parse::<IpAddr>(), Err(AddrParseError::ScopeNotSupported));
+    }
+
+    #[test]
+    fn parse_ipv6_link_local_with_scope() {
+        assert_matches!(
+            "fe80::1%1".parse::<IpAddr>(),
+            Ok(IpAddr::V6(ip, Some(scope)))
+                if ip == "fe80::1".parse::<std::net::Ipv6Addr>().unwrap()
+                && scope == 1
+        );
+    }
+
+    #[test]
+    fn parse_ipv6_link_local_with_scope_interface_not_found() {
+        // An empty scope ID should trigger a failed lookup.
+        assert_matches!(
+            "fe80::1%".parse::<IpAddr>(),
+            Err(AddrParseError::InterfaceNotFound(name))
+                if name == ""
+        );
+
+        // The trailing '%' forces a failed lookup. At the time of writing, no
+        // OS supports this character as part of interface names.
+        assert_matches!(
+            "fe80::1%eth0%".parse::<IpAddr>(),
+            Err(AddrParseError::InterfaceNotFound(name))
+                if name == "eth0%"
+        );
+    }
+}
diff --git a/runner/src/runner.rs b/runner/src/runner.rs
new file mode 100644
index 0000000..c40e05d
--- /dev/null
+++ b/runner/src/runner.rs
@@ -0,0 +1,83 @@
+// Copyright 2023 The Fuchsia Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#[cfg(unix)]
+use std::os::unix::process::ExitStatusExt;
+use std::process::Command;
+use std::{path::PathBuf, process::ExitCode};
+
+use anyhow::{Context, Result};
+use itertools::Itertools;
+
+/// Runner for dispatching antlion.
+pub(crate) trait Runner {
+    /// Run antlion using the provided config.
+    fn run(&self, config: PathBuf) -> Result<ExitStatus>;
+}
+
+/// Executes antlion as a local process.
+pub(crate) struct ProcessRunner {
+    pub python_bin: String,
+    pub antlion_pyz: PathBuf,
+}
+
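+// In effect, `run` spawns a child process equivalent to (paths are
+// illustrative): `/bin/python3.9 antlion.pyz --config config.yaml`.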
+impl Runner for ProcessRunner {
+    fn run(&self, config: PathBuf) -> Result<ExitStatus> {
+        let args = [
+            &self.antlion_pyz.clone().into_os_string().into_string().unwrap(),
+            "--config",
+            &config.into_os_string().into_string().unwrap(),
+        ];
+
+        println!(
+            "Launching antlion to run: \"{} {}\"\n",
+            &self.python_bin,
+            args.iter().format(" "),
+        );
+
+        let status = Command::new(&self.python_bin)
+            .args(args)
+            .status()
+            .context("Failed to execute antlion")?;
+
+        Ok(ExitStatus::from(status))
+    }
+}
+
+/// Describes the result of a child process after it has terminated.
+pub(crate) enum ExitStatus {
+    /// Process terminated without error.
+    Ok,
+    /// Process terminated with a non-zero status code.
+    Err(i32),
+    /// Process was interrupted by a signal.
+    Interrupt(Option<i32>),
+}
+
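+// For example (assuming a Unix host): a child that exits with code 3 becomes
+// `ExitStatus::Err(3)` and is propagated as exit code 3, while a child killed
+// by SIGINT becomes `ExitStatus::Interrupt(Some(2))` and maps to
+// `ExitCode::FAILURE`.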
+impl From<std::process::ExitStatus> for ExitStatus {
+    fn from(status: std::process::ExitStatus) -> Self {
+        match status.code() {
+            Some(0) => ExitStatus::Ok,
+            Some(code) => ExitStatus::Err(code),
+            None if cfg!(unix) => ExitStatus::Interrupt(status.signal()),
+            None => ExitStatus::Interrupt(None),
+        }
+    }
+}
+
+impl Into<ExitCode> for ExitStatus {
+    fn into(self) -> ExitCode {
+        match self {
+            ExitStatus::Ok => ExitCode::SUCCESS,
+            ExitStatus::Err(code) => {
+                let code = match u8::try_from(code) {
+                    Ok(c) => c,
+                    Err(_) => 1,
+                };
+                ExitCode::from(code)
+            }
+            ExitStatus::Interrupt(_) => ExitCode::FAILURE,
+        }
+    }
+}
diff --git a/runner/src/yaml.rs b/runner/src/yaml.rs
new file mode 100644
index 0000000..ae972bf
--- /dev/null
+++ b/runner/src/yaml.rs
@@ -0,0 +1,95 @@
+use serde_yaml::Value;
+
+/// Recursively merge `b` into `a`: nested mappings are merged key by key,
+/// sequences are appended, and all other values in `a` are overwritten by `b`.
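+///
+/// For example (illustrative values), merging `{a: 1, list: [x]}` into
+/// `{a: 0, b: 2, list: [y]}` produces `{a: 1, b: 2, list: [y, x]}`.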
+pub fn merge(a: &mut Value, b: Value) {
+    match (a, b) {
+        (Value::Mapping(ref mut a), Value::Mapping(b)) => {
+            for (k, v) in b {
+                if !a.contains_key(&k) {
+                    a.insert(k, v);
+                } else {
+                    merge(&mut a[&k], v);
+                }
+            }
+        }
+        (Value::Sequence(ref mut a), Value::Sequence(ref mut b)) => {
+            a.append(b);
+        }
+        (a, b) => *a = b,
+    }
+}
+
+#[cfg(test)]
+mod test {
+    use super::*;
+
+    #[test]
+    fn test_merge_mapping() {
+        let a = "
+            test_params:
+                name: a
+                who_called:
+                    was_a: true
+        ";
+        let mut a: Value = serde_yaml::from_str(a).unwrap();
+        let b = "
+            test_params:
+                name: b
+                who_called:
+                    was_b: true
+        ";
+        let b: Value = serde_yaml::from_str(b).unwrap();
+        merge(&mut a, b);
+        let want = "
+            test_params:
+                name: b
+                who_called:
+                    was_a: true
+                    was_b: true
+        ";
+        let want: Value = serde_yaml::from_str(want).unwrap();
+        assert_eq!(a, want);
+    }
+
+    #[test]
+    fn test_merge_append_arrays() {
+        let mut a: Value = serde_yaml::from_str(" - a").unwrap();
+        let b: Value = serde_yaml::from_str(" - b").unwrap();
+        merge(&mut a, b);
+        let want = "
+            - a
+            - b
+        ";
+        let want: Value = serde_yaml::from_str(want).unwrap();
+        assert_eq!(a, want);
+    }
+
+    #[test]
+    fn test_merge_append_arrays_allow_duplicates() {
+        let mut a: Value = serde_yaml::from_str(" - a").unwrap();
+        let b: Value = serde_yaml::from_str(" - a").unwrap();
+        merge(&mut a, b);
+        let want = "
+            - a
+            - a
+        ";
+        let want: Value = serde_yaml::from_str(want).unwrap();
+        assert_eq!(a, want);
+    }
+
+    #[test]
+    fn test_merge_overwrite_from_null() {
+        let mut a: Value = Value::Null;
+        let b: Value = serde_yaml::from_str("true").unwrap();
+        merge(&mut a, b.clone());
+        assert_eq!(a, b);
+    }
+
+    #[test]
+    fn test_merge_overwrite_with_null() {
+        let mut a: Value = serde_yaml::from_str("true").unwrap();
+        let b: Value = Value::Null;
+        merge(&mut a, b.clone());
+        assert_eq!(a, b);
+    }
+}
diff --git a/setup.py b/setup.py
index 30f198d..28f080f 100644
--- a/setup.py
+++ b/setup.py
@@ -20,33 +20,32 @@
     "mobly==1.12.0",
     "pyyaml>=5.1",
     "tenacity~=8.0",
+    # TODO(b/240443856): Remove these dependencies once antlion runs in
+    # Fuchsia's LUCI infrastructure. They are needed for flashing and for mDNS
+    # discovery, neither of which will be needed in that infrastructure.
+    "usbinfo",
+    "psutil",
+    "zeroconf",
 ]
 
-setup(name='antlion',
-      version='0.2.0',
-      description = "Host-driven, hardware-agnostic Fuchsia connectivity tests",
-      license='Apache-2.0',
-      packages=find_packages(
-          where='src',
-      ),
-      package_dir={"": "src"},
-      include_package_data=True,
-      tests_require=[],
-      install_requires=install_requires,
-      extras_require={
-          'html_graphing': ['bokeh'],
-          'dev': ['shiv', 'toml', 'yapf'],
-          'digital_loggers_pdu': ['dlipower'],
-          'flash': ['usbinfo'],
-          'mdns': ['psutil', 'zeroconf'],
-          'android': [
-              'Monsoon',
-              'numpy',
-              'paramiko[ed25519]',
-              'pylibftdi',
-              'pyserial',
-              'requests',
-              'scapy',
-              'scipy',
-          ],
-      })
+setup(
+    name="antlion",
+    version="0.2.0",
+    description="Host-driven, hardware-agnostic Fuchsia connectivity tests",
+    license="Apache-2.0",
+    packages=find_packages(
+        where="src",
+    ),
+    package_dir={"": "src"},
+    include_package_data=True,
+    tests_require=[],
+    install_requires=install_requires,
+    extras_require={
+        "html_graphing": ["bokeh"],
+        "digital_loggers_pdu": ["dlipower"],
+        "android": [
+            "numpy",
+            "scapy",
+        ],
+    },
+)
diff --git a/src/antlion/asserts.py b/src/antlion/asserts.py
deleted file mode 100644
index ce0a7b1..0000000
--- a/src/antlion/asserts.py
+++ /dev/null
@@ -1,63 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from mobly.asserts import *
-
-
-# Have an instance of unittest.TestCase so we could reuse some logic from
-# python's own unittest.
-# _ProxyTest is required because py2 does not allow instantiating
-# unittest.TestCase directly.
-class _ProxyTest(unittest.TestCase):
-    def runTest(self):
-        pass
-
-
-_pyunit_proxy = _ProxyTest()
-
-
-def assert_almost_equal(first,
-                        second,
-                        places=7,
-                        msg=None,
-                        delta=None,
-                        extras=None):
-    """
-    Assert FIRST to be within +/- DELTA to SECOND, otherwise fail the
-    test.
-    :param first: The first argument, LHS
-    :param second: The second argument, RHS
-    :param places: For floating points, how many decimal places to look into
-    :param msg: Message to display on failure
-    :param delta: The +/- first and second could be apart from each other
-    :param extras: Extra object passed to test failure handler
-    :return:
-    """
-    my_msg = None
-    try:
-        if delta:
-            _pyunit_proxy.assertAlmostEqual(
-                first, second, msg=msg, delta=delta)
-        else:
-            _pyunit_proxy.assertAlmostEqual(
-                first, second, places=places, msg=msg)
-    except Exception as e:
-        my_msg = str(e)
-        if msg:
-            my_msg = "%s %s" % (my_msg, msg)
-    # This is a hack to remove the stacktrace produced by the above exception.
-    if my_msg is not None:
-        fail(my_msg, extras=extras)
diff --git a/src/antlion/base_test.py b/src/antlion/base_test.py
index 5e39933..5033552 100755
--- a/src/antlion/base_test.py
+++ b/src/antlion/base_test.py
@@ -22,7 +22,6 @@
 import traceback
 from concurrent.futures import ThreadPoolExecutor
 
-from antlion import asserts
 from antlion import error
 from antlion import keys
 from antlion import logger
@@ -39,6 +38,7 @@
 from antlion.event.event import TestClassEndEvent
 from antlion.event.subscription_bundle import SubscriptionBundle
 
+from mobly import asserts
 from mobly.base_test import BaseTestClass as MoblyBaseTest
 from mobly.records import ExceptionRecord
 
@@ -50,26 +50,24 @@
 @subscribe_static(TestCaseBeginEvent)
 def _logcat_log_test_begin(event):
     """Ensures that logcat is running. Write a logcat line indicating test case
-     begin."""
+    begin."""
     test_instance = event.test_class
     try:
-        for ad in getattr(test_instance, 'android_devices', []):
+        for ad in getattr(test_instance, "android_devices", []):
             if not ad.is_adb_logcat_on:
                 ad.start_adb_logcat()
             # Write test start token to adb log if android device is attached.
             if not ad.skip_sl4a and ad.droid:
-                ad.droid.logV("%s BEGIN %s" %
-                              (TEST_CASE_TOKEN, event.test_case_name))
+                ad.droid.logV("%s BEGIN %s" % (TEST_CASE_TOKEN, event.test_case_name))
 
     except error.ActsError as e:
         test_instance.results.error.append(
-            ExceptionRecord(e, 'Logcat for test begin: %s' %
-                            event.test_case_name))
-        test_instance.log.error('BaseTest setup_test error: %s' % e.details)
+            ExceptionRecord(e, "Logcat for test begin: %s" % event.test_case_name)
+        )
+        test_instance.log.error("BaseTest setup_test error: %s" % e.details)
     except Exception as e:
-        test_instance.log.warning(
-            'Unable to send BEGIN log command to all devices.')
-        test_instance.log.warning('Error: %s' % e)
+        test_instance.log.warning("Unable to send BEGIN log command to all devices.")
+        test_instance.log.warning("Error: %s" % e)
 
 
 @subscribe_static(TestCaseEndEvent)
@@ -78,20 +76,18 @@
     test_instance = event.test_class
     try:
         # Write test end token to adb log if android device is attached.
-        for ad in getattr(test_instance, 'android_devices', []):
+        for ad in getattr(test_instance, "android_devices", []):
             if not ad.skip_sl4a and ad.droid:
-                ad.droid.logV("%s END %s" %
-                              (TEST_CASE_TOKEN, event.test_case_name))
+                ad.droid.logV("%s END %s" % (TEST_CASE_TOKEN, event.test_case_name))
 
     except error.ActsError as e:
         test_instance.results.error.append(
-            ExceptionRecord(e,
-                            'Logcat for test end: %s' % event.test_case_name))
-        test_instance.log.error('BaseTest teardown_test error: %s' % e.details)
+            ExceptionRecord(e, "Logcat for test end: %s" % event.test_case_name)
+        )
+        test_instance.log.error("BaseTest teardown_test error: %s" % e.details)
     except Exception as e:
-        test_instance.log.warning(
-            'Unable to send END log command to all devices.')
-        test_instance.log.warning('Error: %s' % e)
+        test_instance.log.warning("Unable to send END log command to all devices.")
+        test_instance.log.warning("Error: %s" % e)
 
 
 @subscribe_static(TestCaseBeginEvent)
@@ -100,15 +96,15 @@
     Fuchsia device"""
     test_instance = event.test_class
     try:
-        for fd in getattr(test_instance, 'fuchsia_devices', []):
-            if hasattr(fd, '_sl4f'):
+        for fd in getattr(test_instance, "fuchsia_devices", []):
+            if hasattr(fd, "_sl4f"):
                 fd.sl4f.logging_lib.logI(
-                    "%s BEGIN %s" % (TEST_CASE_TOKEN, event.test_case_name))
+                    "%s BEGIN %s" % (TEST_CASE_TOKEN, event.test_case_name)
+                )
 
     except Exception as e:
-        test_instance.log.warning(
-            'Unable to send BEGIN log command to all devices.')
-        test_instance.log.warning('Error: %s' % e)
+        test_instance.log.warning("Unable to send BEGIN log command to all devices.")
+        test_instance.log.warning("Error: %s" % e)
 
 
 @subscribe_static(TestCaseEndEvent)
@@ -117,15 +113,15 @@
     Fuchsia device"""
     test_instance = event.test_class
     try:
-        for fd in getattr(test_instance, 'fuchsia_devices', []):
-            if hasattr(fd, '_sl4f'):
+        for fd in getattr(test_instance, "fuchsia_devices", []):
+            if hasattr(fd, "_sl4f"):
                 fd.sl4f.logging_lib.logI(
-                    "%s END %s" % (TEST_CASE_TOKEN, event.test_case_name))
+                    "%s END %s" % (TEST_CASE_TOKEN, event.test_case_name)
+                )
 
     except Exception as e:
-        test_instance.log.warning(
-            'Unable to send END log command to all devices.')
-        test_instance.log.warning('Error: %s' % e)
+        test_instance.log.warning("Unable to send END log command to all devices.")
+        test_instance.log.warning("Error: %s" % e)
 
 
 event_bus.register_subscription(_logcat_log_test_begin.subscription)
@@ -193,7 +189,8 @@
 
         self.consecutive_failures = 0
         self.consecutive_failure_limit = self.user_params.get(
-            'consecutive_failure_limit', -1)
+            "consecutive_failure_limit", -1
+        )
         self.size_limit_reached = False
         self.retryable_exceptions = signals.TestFailure
 
@@ -211,8 +208,7 @@
         for ctrl_name in keys.Config.builtin_controller_names.value:
             if ctrl_name in self.controller_configs:
                 module_name = keys.get_module_name(ctrl_name)
-                module = importlib.import_module("antlion.controllers.%s" %
-                                                 module_name)
+                module = importlib.import_module("antlion.controllers.%s" % module_name)
                 builtin_controllers.append(module)
         return builtin_controllers
 
@@ -224,7 +220,7 @@
         without needing to make changes.
         """
         for key, value in self.user_params.items():
-            if key.endswith('files') and isinstance(value, dict):
+            if key.endswith("files") and isinstance(value, dict):
                 new_user_params = dict(value)
                 new_user_params.update(self.user_params)
                 self.user_params = new_user_params
@@ -243,15 +239,12 @@
         Returns:
             A string corresponding to the module's name.
         """
-        if hasattr(a_module, 'ACTS_CONTROLLER_REFERENCE_NAME'):
+        if hasattr(a_module, "ACTS_CONTROLLER_REFERENCE_NAME"):
             return a_module.ACTS_CONTROLLER_REFERENCE_NAME
         else:
-            return a_module.__name__.split('.')[-1]
+            return a_module.__name__.split(".")[-1]
 
-    def register_controller(self,
-                            controller_module,
-                            required=True,
-                            builtin=False):
+    def register_controller(self, controller_module, required=True, builtin=False):
         """Registers an ACTS controller module for a test class. Invokes Mobly's
         implementation of register_controller.
 
@@ -318,7 +311,8 @@
 
         # Get controller objects from Mobly's register_controller
         controllers = self._controller_manager.register_controller(
-            controller_module, required=required)
+            controller_module, required=required
+        )
         if not controllers:
             return None
 
@@ -326,8 +320,7 @@
         # Implementation of "get_info" is optional for a controller module.
         if hasattr(controller_module, "get_info"):
             controller_info = controller_module.get_info(controllers)
-            self.log.info("Controller %s: %s", module_config_name,
-                          controller_info)
+            self.log.info("Controller %s: %s", module_config_name, controller_info)
 
         if builtin:
             setattr(self, module_ref_name, controllers)
@@ -359,7 +352,7 @@
 
         # Skip the test if the consecutive test case failure limit is reached.
         if self.consecutive_failures == self.consecutive_failure_limit:
-            raise signals.TestError('Consecutive test failure')
+            raise signals.TestError("Consecutive test failure")
 
         return self.setup_test()
 
@@ -379,7 +372,7 @@
         """Proxy function to guarantee the base implementation of teardown_test
         is called.
         """
-        self.log.debug('Tearing down test %s' % test_name)
+        self.log.debug("Tearing down test %s" % test_name)
         self.teardown_test()
 
     def _on_fail(self, record):
@@ -502,8 +495,11 @@
         except signals.TestAbortAll:
             raise
         except Exception as e:
-            self.log.exception("Exception happened when executing %s for %s.",
-                               func.__name__, self.current_test_name)
+            self.log.exception(
+                "Exception happened when executing %s for %s.",
+                func.__name__,
+                self.current_test_name,
+            )
             tr_record.add_error(func.__name__, e)
 
     def exec_one_testcase(self, test_name, test_func):
@@ -527,8 +523,8 @@
         self.log.info("%s %s", TEST_CASE_TOKEN, test_name)
 
         # Enable test retry if specified in the ACTS config
-        retry_tests = self.user_params.get('retry_tests', [])
-        full_test_name = '%s.%s' % (class_name, self.test_name)
+        retry_tests = self.user_params.get("retry_tests", [])
+        full_test_name = "%s.%s" % (class_name, self.test_name)
         if any(name in retry_tests for name in [class_name, full_test_name]):
             test_func = self.get_func_with_retry(test_func)
 
@@ -537,8 +533,9 @@
         try:
             try:
                 ret = self._setup_test(self.test_name)
-                asserts.assert_true(ret is not False,
-                                    "Setup for %s failed." % test_name)
+                asserts.assert_true(
+                    ret is not False, "Setup for %s failed." % test_name
+                )
                 verdict = test_func()
             finally:
                 try:
@@ -551,7 +548,8 @@
         except (signals.TestFailure, AssertionError) as e:
             test_signal = e
             if self.user_params.get(
-                    keys.Config.key_test_failure_tracebacks.value, False):
+                keys.Config.key_test_failure_tracebacks.value, False
+            ):
                 self.log.exception(e)
             tr_record.test_fail(e)
         except signals.TestSkip as e:
@@ -594,11 +592,11 @@
                     self._exec_procedure_func(self._on_fail, tr_record)
             finally:
                 self.results.add_record(tr_record)
-                self.summary_writer.dump(tr_record.to_dict(),
-                                         records.TestSummaryEntryType.RECORD)
+                self.summary_writer.dump(
+                    tr_record.to_dict(), records.TestSummaryEntryType.RECORD
+                )
                 self.current_test_name = None
-                event_bus.post(
-                    TestCaseEndEvent(self, self.test_name, test_signal))
+                event_bus.post(TestCaseEndEvent(self, self.test_name, test_signal))
 
     def get_func_with_retry(self, func, attempts=2):
         """Returns a wrapped test method that re-runs after failure. Return test
@@ -626,23 +624,25 @@
                     return func(*args, **kwargs)
                 except exceptions as e:
                     retry = True
-                    msg = 'Failure on attempt %d: %s' % (i + 1, e.details)
+                    msg = "Failure on attempt %d: %s" % (i + 1, e.details)
                     self.log.warning(msg)
                     error_msgs.append(msg)
                     if e.extras:
-                        extras['Attempt %d' % (i + 1)] = e.extras
-            raise signals.TestFailure('\n'.join(error_msgs), extras)
+                        extras["Attempt %d" % (i + 1)] = e.extras
+            raise signals.TestFailure("\n".join(error_msgs), extras)
 
         return wrapper
 
-    def run_generated_testcases(self,
-                                test_func,
-                                settings,
-                                args=None,
-                                kwargs=None,
-                                tag="",
-                                name_func=None,
-                                format_args=False):
+    def run_generated_testcases(
+        self,
+        test_func,
+        settings,
+        args=None,
+        kwargs=None,
+        tag="",
+        name_func=None,
+        format_args=False,
+    ):
         """Deprecated. Please use pre_run and generate_tests.
 
         Generated test cases are not written down as functions, but as a list
@@ -681,27 +681,31 @@
                 try:
                     test_name = name_func(setting, *args, **kwargs)
                 except:
-                    self.log.exception(("Failed to get test name from "
-                                        "test_func. Fall back to default %s"),
-                                       test_name)
+                    self.log.exception(
+                        (
+                            "Failed to get test name from "
+                            "test_func. Fall back to default %s"
+                        ),
+                        test_name,
+                    )
 
             self.results.requested.append(test_name)
 
             if len(test_name) > utils.MAX_FILENAME_LEN:
-                test_name = test_name[:utils.MAX_FILENAME_LEN]
+                test_name = test_name[: utils.MAX_FILENAME_LEN]
 
             previous_success_cnt = len(self.results.passed)
 
             if format_args:
                 self.exec_one_testcase(
                     test_name,
-                    functools.partial(test_func, *(args + (setting, )),
-                                      **kwargs))
+                    functools.partial(test_func, *(args + (setting,)), **kwargs),
+                )
             else:
                 self.exec_one_testcase(
                     test_name,
-                    functools.partial(test_func, *((setting, ) + args),
-                                      **kwargs))
+                    functools.partial(test_func, *((setting,) + args), **kwargs),
+                )
 
             if len(self.results.passed) - previous_success_cnt != 1:
                 failed_settings.append(setting)
@@ -727,11 +731,12 @@
         except signals.TestAbortAll:
             raise
         except:
-            self.log.exception("Exception happened when executing %s in %s.",
-                               func.__name__, self.TAG)
+            self.log.exception(
+                "Exception happened when executing %s in %s.", func.__name__, self.TAG
+            )
             return False
 
-    def _block_all_test_cases(self, tests, reason='Failed class setup'):
+    def _block_all_test_cases(self, tests, reason="Failed class setup"):
         """
         Block all passed in test cases.
         Args:
@@ -743,12 +748,13 @@
             signal = signals.TestError(reason)
             record = records.TestResultRecord(test_name, self.TAG)
             record.test_begin()
-            if hasattr(test_func, 'gather'):
+            if hasattr(test_func, "gather"):
                 signal.extras = test_func.gather()
             record.test_error(signal)
             self.results.add_record(record)
-            self.summary_writer.dump(record.to_dict(),
-                                     records.TestSummaryEntryType.RECORD)
+            self.summary_writer.dump(
+                record.to_dict(), records.TestSummaryEntryType.RECORD
+            )
             self._on_skip(record)
 
     def run(self, test_names=None):
@@ -788,14 +794,18 @@
             matches = []
             for test_name in test_names:
                 for valid_test in valid_tests:
-                    if (fnmatch.fnmatch(valid_test, test_name)
-                            and valid_test not in matches):
+                    if (
+                        fnmatch.fnmatch(valid_test, test_name)
+                        and valid_test not in matches
+                    ):
                         matches.append(valid_test)
         else:
             matches = valid_tests
         self.results.requested = matches
-        self.summary_writer.dump(self.results.requested_test_names_dict(),
-                                 records.TestSummaryEntryType.TEST_NAME_LIST)
+        self.summary_writer.dump(
+            self.results.requested_test_names_dict(),
+            records.TestSummaryEntryType.TEST_NAME_LIST,
+        )
         tests = self._get_test_methods(matches)
 
         # Setup for the class.
@@ -806,7 +816,7 @@
                 self._block_all_test_cases(tests)
                 setup_fail = True
         except signals.TestAbortClass:
-            self.log.exception('Test class %s aborted' % self.TAG)
+            self.log.exception("Test class %s aborted" % self.TAG)
             setup_fail = True
         except Exception as e:
             self.log.exception("Failed to setup %s.", self.TAG)
@@ -814,17 +824,21 @@
             setup_fail = True
         if setup_fail:
             self._exec_func(self._teardown_class)
-            self.log.info("Summary for test class %s: %s", self.TAG,
-                          self.results.summary_str())
+            self.log.info(
+                "Summary for test class %s: %s", self.TAG, self.results.summary_str()
+            )
             return self.results
 
         # Run tests in order.
         test_case_iterations = self.user_params.get(
-            keys.Config.key_test_case_iterations.value, 1)
-        if any([
+            keys.Config.key_test_case_iterations.value, 1
+        )
+        if any(
+            [
                 substr in self.__class__.__name__
-                for substr in ['Preflight', 'Postflight']
-        ]):
+                for substr in ["Preflight", "Postflight"]
+            ]
+        ):
             test_case_iterations = 1
         try:
             for test_name, test_func in tests:
@@ -832,7 +846,7 @@
                     self.exec_one_testcase(test_name, test_func)
             return self.results
         except signals.TestAbortClass:
-            self.log.exception('Test class %s aborted' % self.TAG)
+            self.log.exception("Test class %s aborted" % self.TAG)
             return self.results
         except signals.TestAbortAll as e:
             # Piggy-back test results on this exception object so we don't lose
@@ -841,8 +855,9 @@
             raise e
         finally:
             self._exec_func(self._teardown_class)
-            self.log.info("Summary for test class %s: %s", self.TAG,
-                          self.results.summary_str())
+            self.log.info(
+                "Summary for test class %s: %s", self.TAG, self.results.summary_str()
+            )
 
     def _ad_take_bugreport(self, ad, test_name, begin_time):
         for i in range(3):
@@ -863,15 +878,17 @@
             try:
                 ad.get_qxdm_logs(test_name, qxdm_begin_time)
             except Exception as e:
-                ad.log.error("Failed to get QXDM log for %s with error %s",
-                             test_name, e)
+                ad.log.error(
+                    "Failed to get QXDM log for %s with error %s", test_name, e
+                )
                 result = False
 
         try:
             ad.check_crash_report(test_name, begin_time, log_crash_report=True)
         except Exception as e:
-            ad.log.error("Failed to check crash report for %s with error %s",
-                         test_name, e)
+            ad.log.error(
+                "Failed to check crash report for %s with error %s", test_name, e
+            )
             result = False
         return result
 
@@ -890,15 +907,13 @@
         # problematic tests, we skip bugreport and other failure artifact
         # creation.
         class_name = self.__class__.__name__
-        quiet_tests = self.user_params.get('quiet_tests', [])
+        quiet_tests = self.user_params.get("quiet_tests", [])
         if class_name in quiet_tests:
-            self.log.info(
-                "Skipping bug report, as directed for this test class.")
+            self.log.info("Skipping bug report, as directed for this test class.")
             return True
-        full_test_name = '%s.%s' % (class_name, test_name)
+        full_test_name = "%s.%s" % (class_name, test_name)
         if full_test_name in quiet_tests:
-            self.log.info(
-                "Skipping bug report, as directed for this test case.")
+            self.log.info("Skipping bug report, as directed for this test case.")
             return True
 
         # Once we hit a certain log path size, it's not going to get smaller.
@@ -907,7 +922,8 @@
             return True
         try:
             max_log_size = int(
-                self.user_params.get("soft_output_size_limit") or "invalid")
+                self.user_params.get("soft_output_size_limit") or "invalid"
+            )
             log_path = getattr(logging, "log_path", None)
             if log_path:
                 curr_log_size = utils.get_directory_size(log_path)
@@ -926,10 +942,9 @@
             return
 
         executor = ThreadPoolExecutor(max_workers=10)
-        for ad in getattr(self, 'android_devices', []):
+        for ad in getattr(self, "android_devices", []):
             executor.submit(self._ad_take_bugreport, ad, test_name, begin_time)
-            executor.submit(self._ad_take_extra_logs, ad, test_name,
-                            begin_time)
+            executor.submit(self._ad_take_extra_logs, ad, test_name, begin_time)
         executor.shutdown()
 
     def _reboot_device(self, ad):
@@ -937,24 +952,23 @@
         ad = ad.reboot()
 
     def _cleanup_logger_sessions(self):
-        for (mylogger, session) in self.logger_sessions:
-            self.log.info("Resetting a diagnostic session %s, %s", mylogger,
-                          session)
+        for mylogger, session in self.logger_sessions:
+            self.log.info("Resetting a diagnostic session %s, %s", mylogger, session)
             mylogger.reset()
         self.logger_sessions = []
 
     def _pull_diag_logs(self, test_name, begin_time):
-        for (mylogger, session) in self.logger_sessions:
+        for mylogger, session in self.logger_sessions:
             self.log.info("Pulling diagnostic session %s", mylogger)
             mylogger.stop(session)
             diag_path = os.path.join(
-                self.log_path, logger.epoch_to_log_line_timestamp(begin_time))
+                self.log_path, logger.epoch_to_log_line_timestamp(begin_time)
+            )
             os.makedirs(diag_path, exist_ok=True)
             mylogger.pull(session, diag_path)
 
     def register_test_class_event_subscriptions(self):
-        self.class_subscriptions = subscription_bundle.create_from_instance(
-            self)
+        self.class_subscriptions = subscription_bundle.create_from_instance(self)
         self.class_subscriptions.register()
 
     def unregister_test_class_event_subscriptions(self):
diff --git a/src/antlion/bin/act.py b/src/antlion/bin/act.py
index 81d0452..2f78645 100755
--- a/src/antlion/bin/act.py
+++ b/src/antlion/bin/act.py
@@ -54,8 +54,7 @@
     except signals.TestAbortAll:
         return True
     except:
-        print("Exception when executing %s, iteration %s." %
-              (runner.testbed_name, i))
+        print("Exception when executing %s, iteration %s." % (runner.testbed_name, i))
         print(traceback.format_exc())
     finally:
         runner.stop()
@@ -109,8 +108,10 @@
             ret = _run_test(c, test_identifiers, repeat)
             ok = ok and ret
         except Exception as e:
-            print("Exception occurred when executing test bed %s. %s" %
-                  (c.testbed_name, e))
+            print(
+                "Exception occurred when executing test bed %s. %s"
+                % (c.testbed_name, e)
+            )
     return ok
 
 
@@ -122,63 +123,80 @@
     functions and acts.test_runner.execute_one_test_class.
     """
     parser = argparse.ArgumentParser(
-        description=("Specify tests to run. If nothing specified, "
-                     "run all test cases found."))
-    parser.add_argument('-c',
-                        '--config',
-                        type=str,
-                        required=True,
-                        metavar="<PATH>",
-                        help="Path to the test configuration file.")
+        description=(
+            "Specify tests to run. If nothing specified, " "run all test cases found."
+        )
+    )
     parser.add_argument(
-        '-ci',
-        '--campaign_iterations',
+        "-c",
+        "--config",
+        type=str,
+        required=True,
+        metavar="<PATH>",
+        help="Path to the test configuration file.",
+    )
+    parser.add_argument(
+        "-ci",
+        "--campaign_iterations",
         metavar="<CAMPAIGN_ITERATIONS>",
-        nargs='?',
+        nargs="?",
         type=int,
         const=1,
         default=1,
-        help="Number of times to run the campaign or a group of test cases.")
-    parser.add_argument('-tb',
-                        '--testbed',
-                        nargs='+',
-                        type=str,
-                        metavar="[<TEST BED NAME1> <TEST BED NAME2> ...]",
-                        help="Specify which test beds to run tests on.")
-    parser.add_argument('-lp',
-                        '--logpath',
-                        type=str,
-                        metavar="<PATH>",
-                        help="Root path under which all logs will be placed.")
+        help="Number of times to run the campaign or a group of test cases.",
+    )
     parser.add_argument(
-        '-tp',
-        '--testpaths',
-        nargs='*',
+        "-tb",
+        "--testbed",
+        nargs="+",
+        type=str,
+        metavar="[<TEST BED NAME1> <TEST BED NAME2> ...]",
+        help="Specify which test beds to run tests on.",
+    )
+    parser.add_argument(
+        "-lp",
+        "--logpath",
+        type=str,
+        metavar="<PATH>",
+        help="Root path under which all logs will be placed.",
+    )
+    parser.add_argument(
+        "-tp",
+        "--testpaths",
+        nargs="*",
         type=str,
         metavar="<PATH> <PATH>",
-        help="One or more non-recursive test class search paths.")
+        help="One or more non-recursive test class search paths.",
+    )
 
     group = parser.add_mutually_exclusive_group(required=True)
-    group.add_argument('-tc',
-                       '--testclass',
-                       nargs='+',
-                       type=str,
-                       metavar="[TestClass1 TestClass2:test_xxx ...]",
-                       help="A list of test classes/cases to run.")
     group.add_argument(
-        '-tf',
-        '--testfile',
+        "-tc",
+        "--testclass",
+        nargs="+",
+        type=str,
+        metavar="[TestClass1 TestClass2:test_xxx ...]",
+        help="A list of test classes/cases to run.",
+    )
+    group.add_argument(
+        "-tf",
+        "--testfile",
         nargs=1,
         type=str,
         metavar="<PATH>",
-        help=("Path to a file containing a comma delimited list of test "
-              "classes to run."))
-    parser.add_argument('-ti',
-                        '--test_case_iterations',
-                        metavar="<TEST_CASE_ITERATIONS>",
-                        nargs='?',
-                        type=int,
-                        help="Number of times to run every test case.")
+        help=(
+            "Path to a file containing a comma delimited list of test "
+            "classes to run."
+        ),
+    )
+    parser.add_argument(
+        "-ti",
+        "--test_case_iterations",
+        metavar="<TEST_CASE_ITERATIONS>",
+        nargs="?",
+        type=int,
+        help="Number of times to run every test case.",
+    )
 
     args = parser.parse_args(sys.argv[1:])
     test_list = None
@@ -186,12 +204,18 @@
         test_list = config_parser.parse_test_file(args.testfile[0])
     elif args.testclass:
         test_list = args.testclass
-    if re.search(r'\.ya?ml$', args.config):
-        parsed_configs = mobly_config_parser.load_test_config_file(
-            args.config, args.testbed)
-    else:
-        parsed_configs = config_parser.load_test_config_file(
-            args.config, args.testbed)
+
+    config = args.config
+
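+    # Legacy ACTS JSON configs are converted to Mobly YAML on the fly so that
+    # only the Mobly config format needs to be handled below.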
+    if config.endswith(".json"):
+        print(
+            "DEPRECATION NOTICE: Converting ACTS JSON to Mobly YAML. ACTS is "
+            + "deprecated. Support will be removed in the next release."
+        )
+        config = utils.acts_json_to_mobly_yaml(config)
+        print(f"Wrote YAML config to {config}")
+
+    parsed_configs = mobly_config_parser.load_test_config_file(config, args.testbed)
 
     for test_run_config in parsed_configs:
         if args.testpaths:
@@ -205,10 +229,12 @@
 
         # Sets the --testpaths flag to the default test directory if left unset.
         testpath_key = keys.Config.key_test_paths.value
-        if (testpath_key not in test_run_config.controller_configs
-                or test_run_config.controller_configs[testpath_key] is None):
+        if (
+            testpath_key not in test_run_config.controller_configs
+            or test_run_config.controller_configs[testpath_key] is None
+        ):
             test_run_config.controller_configs[testpath_key] = [
-                os.path.join(os.path.dirname(__file__), '../tests/'),
+                os.path.join(os.path.dirname(__file__), "../tests/"),
             ]
 
         for path in test_run_config.controller_configs[testpath_key]:
@@ -217,15 +243,25 @@
         # TODO(markdr): Find a way to merge this with the validation done in
         # Mobly's load_test_config_file.
         if not test_run_config.log_path:
-            raise ActsConfigError("Required key %s missing in test config." %
-                                  keys.Config.key_log_path.value)
+            raise ActsConfigError(
+                "Required key %s missing in test config."
+                % keys.Config.key_log_path.value
+            )
         test_run_config.log_path = utils.abs_path(test_run_config.log_path)
 
     # Prepare args for test runs
     test_identifiers = config_parser.parse_test_list(test_list)
 
-    exec_result = _run_tests(parsed_configs, test_identifiers,
-                             args.campaign_iterations)
+    print(
+        "\n\nDEPRECATION NOTICE: Running antlion tests with act.py is "
+        "deprecated and will be removed in the next release. Please migrate "
+        "by using Mobly YAML configs and executing the test class directly:\n\n"
+    )
+    for test_class, _ in test_identifiers:
+        print(f"   python {test_class}.py -c {config}")
+    print("\n")
+
+    exec_result = _run_tests(parsed_configs, test_identifiers, args.campaign_iterations)
     if exec_result is False:
         # return 1 upon test failure.
         sys.exit(1)
diff --git a/src/antlion/libs/utils/__init__.py b/src/antlion/capabilities/__init__.py
similarity index 100%
rename from src/antlion/libs/utils/__init__.py
rename to src/antlion/capabilities/__init__.py
diff --git a/src/antlion/capabilities/ssh.py b/src/antlion/capabilities/ssh.py
new file mode 100644
index 0000000..eeb1e16
--- /dev/null
+++ b/src/antlion/capabilities/ssh.py
@@ -0,0 +1,377 @@
+#!/usr/bin/env python3
+#
+# Copyright 2023 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import subprocess
+import time
+
+from dataclasses import dataclass
+from typing import BinaryIO, List, Optional, Union
+
+from antlion import logger
+from antlion import signals
+from antlion.net import wait_for_port
+
+DEFAULT_SSH_PORT: int = 22
+DEFAULT_SSH_TIMEOUT_SEC: int = 60
+DEFAULT_SSH_CONNECT_TIMEOUT_SEC: int = 90
+DEFAULT_SSH_SERVER_ALIVE_INTERVAL: int = 30
+# The default package repository for all components.
+
+
+class SSHResult:
+    """Result of an SSH command."""
+
+    def __init__(
+        self, process: Union[subprocess.CompletedProcess, subprocess.CalledProcessError]
+    ) -> None:
+        self._raw_stdout = process.stdout
+        self._stderr = process.stderr.decode("utf-8", errors="replace")
+        self._exit_status: int = process.returncode
+
+    def __str__(self):
+        if self.exit_status == 0:
+            return self.stdout
+        return f'status {self.exit_status}, stdout: "{self.stdout}", stderr: "{self.stderr}"'
+
+    @property
+    def stdout(self) -> str:
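+        # Decoded lazily on first access and cached; the raw bytes remain
+        # available through raw_stdout.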
+        if not hasattr(self, "_stdout"):
+            self._stdout = self._raw_stdout.decode("utf-8", errors="replace")
+        return self._stdout
+
+    @property
+    def stderr(self) -> str:
+        return self._stderr
+
+    @property
+    def exit_status(self) -> int:
+        return self._exit_status
+
+    @property
+    def raw_stdout(self) -> bytes:
+        return self._raw_stdout
+
+
+class SSHError(signals.TestError):
+    """A SSH command returned with a non-zero status code."""
+
+    def __init__(self, command: str, result: SSHResult):
+        super().__init__(f'SSH command "{command}" unexpectedly returned {result}')
+        self.result = result
+
+
+class SSHTimeout(signals.TestError):
+    """A SSH command timed out."""
+
+    def __init__(self, err: subprocess.TimeoutExpired):
+        super().__init__(
+            f'SSH command "{err.cmd}" timed out after {err.timeout}s, '
+            f'stdout="{err.stdout}", stderr="{err.stderr}"'
+        )
+
+
+class SSHTransportError(signals.TestError):
+    """Failure to send an SSH command."""
+
+
+@dataclass
+class SSHConfig:
+    """SSH client config."""
+
+    # SSH flags. See ssh(1) for full details.
+    user: str
+    host_name: str
+    identity_file: str
+
+    ssh_binary: str = "ssh"
+    config_file: str = "/dev/null"
+    port: int = 22
+
+    # SSH options. See ssh_config(5) for full details.
+    connect_timeout: int = DEFAULT_SSH_CONNECT_TIMEOUT_SEC
+    server_alive_interval: int = DEFAULT_SSH_SERVER_ALIVE_INTERVAL
+    strict_host_key_checking: bool = False
+    user_known_hosts_file: str = "/dev/null"
+    log_level: str = "ERROR"
+
+    def full_command(self, command: str, force_tty: bool = False) -> List[str]:
+        """Generate the complete command to execute command over SSH.
+
+        Args:
+            command: The command to run over SSH
+            force_tty: Force pseudo-terminal allocation. This can be used to
+                execute arbitrary screen-based programs on a remote machine,
+                which can be very useful, e.g. when implementing menu services.
+
+        Returns:
+            Arguments composing the complete call to SSH.
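+
+        A composed command looks roughly like the following (values are
+        illustrative):
+            ssh -i <identity_file> -F <config_file> -p <port> -o <options>...
+                <user>@<host_name> <command>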
+        """
+        optional_flags = []
+        if force_tty:
+            # Multiple -t options force tty allocation, even if ssh has no local
+            # tty. This is necessary for launching ssh with subprocess without
+            # shell=True.
+            optional_flags.append("-tt")
+
+        return (
+            [
+                self.ssh_binary,
+                # SSH flags
+                "-i",
+                self.identity_file,
+                "-F",
+                self.config_file,
+                "-p",
+                str(self.port),
+                # SSH configuration options
+                "-o",
+                f"ConnectTimeout={self.connect_timeout}",
+                "-o",
+                f"ServerAliveInterval={self.server_alive_interval}",
+                "-o",
+                f'StrictHostKeyChecking={"yes" if self.strict_host_key_checking else "no"}',
+                "-o",
+                f"UserKnownHostsFile={self.user_known_hosts_file}",
+                "-o",
+                f"LogLevel={self.log_level}",
+            ]
+            + optional_flags
+            + [f"{self.user}@{self.host_name}"]
+            + command.split()
+        )
+
+
+class SSHProvider:
+    """Device-specific provider for SSH clients."""
+
+    def __init__(self, config: SSHConfig) -> None:
+        """
+        Args:
+            config: SSH client config
+        """
+        logger_tag = f"ssh | {config.host_name}"
+        if config.port != DEFAULT_SSH_PORT:
+            logger_tag += f":{config.port}"
+
+        # Check if the private key exists
+
+        self.log = logger.create_tagged_trace_logger(logger_tag)
+        self.config = config
+
+        try:
+            self.wait_until_reachable()
+            self.log.info("sshd is reachable")
+        except Exception as e:
+            raise TimeoutError("sshd is unreachable") from e
+
+    def wait_until_reachable(self) -> None:
+        """Wait for the device to become reachable via SSH.
+
+        Raises:
+            TimeoutError: connect_timeout has expired without a successful SSH
+                connection to the device
+            SSHTransportError: SSH is available on the device but
+                connect_timeout has expired and SSH fails to run
+            SSHTimeout: SSH is available on the device but connect_timeout has
+                expired and SSH takes too long to run a command
+        """
+        timeout_sec = self.config.connect_timeout
+        timeout = time.time() + timeout_sec
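+        # First wait for the TCP port to accept connections, then confirm that
+        # sshd actually responds to a trivial command.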
+        wait_for_port(self.config.host_name, self.config.port, timeout_sec=timeout_sec)
+
+        while True:
+            try:
+                self._run("echo", timeout_sec, False, None)
+                return
+            except SSHTransportError as e:
+                # Repeat if necessary; _run() can exit prematurely by receiving
+                # SSH transport errors. These errors can be caused by sshd not
+                # being fully initialized yet.
+                if time.time() < timeout:
+                    continue
+                else:
+                    raise e
+
+    def wait_until_unreachable(
+        self, interval_sec: int = 1, timeout_sec: int = DEFAULT_SSH_CONNECT_TIMEOUT_SEC
+    ) -> None:
+        """Wait for the device to become unreachable via SSH.
+
+        Args:
+            interval_sec: Seconds to wait between reachability checks
+            timeout_sec: Seconds to wait until raising TimeoutError
+
+        Raises:
+            TimeoutError: when timeout_sec has expired and the device is
+                still reachable over SSH
+        """
+        timeout = time.time() + timeout_sec
+
+        while True:
+            try:
+                wait_for_port(
+                    self.config.host_name, self.config.port, timeout_sec=interval_sec
+                )
+            except TimeoutError:
+                return
+
+            if time.time() >= timeout:
+                raise TimeoutError(
+                    f"Connection to {self.config.host_name} is still reachable "
+                    f"after {timeout_sec}s"
+                )
+
+    def run(
+        self,
+        command: str,
+        timeout_sec: int = DEFAULT_SSH_TIMEOUT_SEC,
+        connect_retries: int = 3,
+        force_tty: bool = False,
+    ) -> SSHResult:
+        """Run a command on the device then exit.
+
+        Args:
+            command: String to send to the device.
+            timeout_sec: Seconds to wait for the command to complete.
+            connect_retries: Amount of times to retry connect on fail.
+            force_tty: Force pseudo-terminal allocation.
+
+        Raises:
+            SSHError: if the SSH command returns a non-zero status code
+            SSHTransportError: if SSH fails to run the command
+            SSHTimeout: if there is no response within timeout_sec
+
+        Returns:
+            SSHResult from the executed command.
+        """
+        return self._run_with_retry(
+            command, timeout_sec, connect_retries, force_tty, stdin=None
+        )
+
+    def _run_with_retry(
+        self,
+        command: str,
+        timeout_sec: int,
+        connect_retries: int,
+        force_tty: bool,
+        stdin: Optional[BinaryIO],
+    ) -> SSHResult:
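+        # Fallback error, raised only if connect_retries is 0 and the retry
+        # loop below never executes.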
+        err: Exception = ValueError("connect_retries cannot be 0")
+        for i in range(0, connect_retries):
+            try:
+                return self._run(command, timeout_sec, force_tty, stdin)
+            except SSHTransportError as e:
+                err = e
+                self.log.warn(f"Connect failed: {e}")
+        raise err
+
+    def _run(
+        self, command: str, timeout_sec: int, force_tty: bool, stdin: Optional[BinaryIO]
+    ) -> SSHResult:
+        full_command = self.config.full_command(command, force_tty)
+        self.log.debug(
+            f'Running "{command}" (full command: "{" ".join(full_command)}")'
+        )
+        try:
+            process = subprocess.run(
+                full_command,
+                capture_output=True,
+                timeout=timeout_sec,
+                check=True,
+                stdin=stdin,
+            )
+        except subprocess.CalledProcessError as e:
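+            # ssh exits with 255 for transport-level failures; any other
+            # non-zero code is the remote command's own exit status.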
+            if e.returncode == 255:
+                stderr = e.stderr.decode("utf-8", errors="replace")
+                if (
+                    "Name or service not known" in stderr
+                    or "Host does not exist" in stderr
+                ):
+                    raise SSHTransportError(
+                        f"Hostname {self.config.host_name} cannot be resolved to an address"
+                    ) from e
+                if "Connection timed out" in stderr:
+                    raise SSHTransportError(
+                        f"Failed to establish a connection to {self.config.host_name} within {timeout_sec}s"
+                    ) from e
+                if "Connection refused" in stderr:
+                    raise SSHTransportError(
+                        f"Connection refused by {self.config.host_name}"
+                    ) from e
+
+            raise SSHError(command, SSHResult(e)) from e
+        except subprocess.TimeoutExpired as e:
+            raise SSHTimeout(e) from e
+
+        return SSHResult(process)
+
+    def upload_file(
+        self,
+        local_path: str,
+        remote_path: str,
+        timeout_sec: int = DEFAULT_SSH_TIMEOUT_SEC,
+        connect_retries: int = 3,
+    ) -> None:
+        """Upload a file to the device.
+
+        Args:
+            local_path: Path to the file to upload
+            remote_path: Path on the remote device to place the uploaded file.
+            timeout_sec: Seconds to wait for the command to complete.
+            connect_retries: Amount of times to retry connect on fail.
+
+        Raises:
+            SSHError: if the SSH upload returns a non-zero status code
+            SSHTransportError: if SSH fails to run the upload command
+            SSHTimeout: if there is no response within timeout_sec
+        """
+        file = open(local_path, "rb")
+        self._run_with_retry(
+            f"cat > {remote_path}",
+            timeout_sec,
+            connect_retries,
+            force_tty=False,
+            stdin=file,
+        )
+
+    def download_file(
+        self,
+        remote_path: str,
+        local_path: str,
+        timeout_sec: int = DEFAULT_SSH_TIMEOUT_SEC,
+        connect_retries: int = 3,
+    ) -> None:
+        """Upload a file to the device.
+
+        Args:
+            remote_path: Path on the remote device to download.
+            local_path: Path on the host to the place the downloaded file.
+            timeout_sec: Seconds to wait for the command to complete.
+            connect_retries: Amount of times to retry connect on fail.
+
+        Raises:
+            SSHError: if the SSH command returns a non-zero status code
+            SSHTransportError: if SSH fails to run the command
+            SSHTimeout: if there is no response within timeout_sec
+        """
+        file = open(local_path, "rb")
+        return self._run_with_retry(
+            f"cat > {remote_path}",
+            timeout_sec,
+            connect_retries,
+            force_tty=False,
+            stdin=file,
+        )
diff --git a/src/antlion/config_parser.py b/src/antlion/config_parser.py
index 0cfb308..7f202ff 100755
--- a/src/antlion/config_parser.py
+++ b/src/antlion/config_parser.py
@@ -23,12 +23,12 @@
 from antlion import utils
 
 # An environment variable defining the base location for ACTS logs.
-_ENV_ACTS_LOGPATH = 'ACTS_LOGPATH'
+_ENV_ACTS_LOGPATH = "ACTS_LOGPATH"
 # An environment variable that enables test case failures to log stack traces.
-_ENV_TEST_FAILURE_TRACEBACKS = 'ACTS_TEST_FAILURE_TRACEBACKS'
+_ENV_TEST_FAILURE_TRACEBACKS = "ACTS_TEST_FAILURE_TRACEBACKS"
 # An environment variable defining the test search paths for ACTS.
-_ENV_ACTS_TESTPATHS = 'ACTS_TESTPATHS'
-_PATH_SEPARATOR = ':'
+_ENV_ACTS_TESTPATHS = "ACTS_TESTPATHS"
+_PATH_SEPARATOR = ":"
 
 
 class ActsConfigError(Exception):
@@ -43,13 +43,11 @@
     for k in keys.Config.reserved_keys.value:
         # TODO(markdr): Remove this continue after merging this with the
         # validation done in Mobly's load_test_config_file.
-        if (k == keys.Config.key_test_paths.value
-                or k == keys.Config.key_log_path.value):
+        if k == keys.Config.key_test_paths.value or k == keys.Config.key_log_path.value:
             continue
 
         if k not in test_config:
-            raise ActsConfigError("Required key %s missing in test config." %
-                                  k)
+            raise ActsConfigError("Required key %s missing in test config." % k)
 
 
 def _validate_testbed_name(name):
@@ -70,58 +68,26 @@
         raise ActsConfigError("Test bed names have to be string.")
     for l in name:
         if l not in utils.valid_filename_chars:
-            raise ActsConfigError(
-                "Char '%s' is not allowed in test bed names." % l)
+            raise ActsConfigError("Char '%s' is not allowed in test bed names." % l)
 
 
-def _update_file_paths(config, config_path):
-    """ Checks if the path entries are valid.
-
-    If the file path is invalid, assume it is a relative path and append
-    that to the config file path.
-
-    Args:
-        config : the config object to verify.
-        config_path : The path to the config file, which can be used to
-                      generate absolute paths from relative paths in configs.
-
-    Raises:
-        If the file path is invalid, ActsConfigError is raised.
-    """
-    # Check the file_path_keys and update if it is a relative path.
-    for file_path_key in keys.Config.file_path_keys.value:
-        if file_path_key in config:
-            config_file = config[file_path_key]
-            if type(config_file) is str:
-                if not os.path.isfile(config_file):
-                    config_file = os.path.join(config_path, config_file)
-                if not os.path.isfile(config_file):
-                    raise ActsConfigError(
-                        "Unable to load config %s from test "
-                        "config file.", config_file)
-                config[file_path_key] = config_file
-
-
-def _validate_testbed_configs(testbed_configs, config_path):
+def _validate_testbed_configs(testbed_configs):
     """Validates the testbed configurations.
 
     Args:
         testbed_configs: A list of testbed configuration json objects.
-        config_path : The path to the config file, which can be used to
-                      generate absolute paths from relative paths in configs.
 
     Raises:
         If any part of the configuration is invalid, ActsConfigError is raised.
     """
     # Cross checks testbed configs for resource conflicts.
-    for name, config in testbed_configs.items():
-        _update_file_paths(config, config_path)
+    for name in testbed_configs:
         _validate_testbed_name(name)
 
 
 def gen_term_signal_handler(test_runners):
     def termination_sig_handler(signal_num, frame):
-        print('Received sigterm %s.' % signal_num)
+        print("Received sigterm %s." % signal_num)
         for t in test_runners:
             t.stop()
         sys.exit(1)
@@ -141,7 +107,7 @@
         name, the list of strings is a list of test case names. The list can be
         None.
     """
-    tokens = item.split(':')
+    tokens = item.split(":")
     if len(tokens) > 2:
         raise ActsConfigError("Syntax error in test specifier %s" % item)
     if len(tokens) == 1:
@@ -152,7 +118,7 @@
         # This should be considered a test class name followed by
         # a list of test case names.
         test_cls_name, test_case_names = tokens
-        clean_names = [elem.strip() for elem in test_case_names.split(',')]
+        clean_names = [elem.strip() for elem in test_case_names.split(",")]
         return test_cls_name, clean_names
 
 
@@ -206,44 +172,49 @@
             else:
                 raise ActsConfigError(
                     'Expected testbed named "%s", but none was found. Check '
-                    'if you have the correct testbed names.' % name)
+                    "if you have the correct testbed names." % name
+                )
         testbeds = tbs
 
-    if (keys.Config.key_log_path.value not in configs
-            and _ENV_ACTS_LOGPATH in os.environ):
-        print('Using environment log path: %s' %
-              (os.environ[_ENV_ACTS_LOGPATH]))
+    if (
+        keys.Config.key_log_path.value not in configs
+        and _ENV_ACTS_LOGPATH in os.environ
+    ):
+        print("Using environment log path: %s" % (os.environ[_ENV_ACTS_LOGPATH]))
         configs[keys.Config.key_log_path.value] = os.environ[_ENV_ACTS_LOGPATH]
-    if (keys.Config.key_test_paths.value not in configs
-            and _ENV_ACTS_TESTPATHS in os.environ):
-        print('Using environment test paths: %s' %
-              (os.environ[_ENV_ACTS_TESTPATHS]))
-        configs[keys.Config.key_test_paths.
-                value] = os.environ[_ENV_ACTS_TESTPATHS].split(_PATH_SEPARATOR)
-    if (keys.Config.key_test_failure_tracebacks not in configs
-            and _ENV_TEST_FAILURE_TRACEBACKS in os.environ):
-        configs[keys.Config.key_test_failure_tracebacks.
-                value] = os.environ[_ENV_TEST_FAILURE_TRACEBACKS]
+    if (
+        keys.Config.key_test_paths.value not in configs
+        and _ENV_ACTS_TESTPATHS in os.environ
+    ):
+        print("Using environment test paths: %s" % (os.environ[_ENV_ACTS_TESTPATHS]))
+        configs[keys.Config.key_test_paths.value] = os.environ[
+            _ENV_ACTS_TESTPATHS
+        ].split(_PATH_SEPARATOR)
+    if (
+        keys.Config.key_test_failure_tracebacks.value not in configs
+        and _ENV_TEST_FAILURE_TRACEBACKS in os.environ
+    ):
+        configs[keys.Config.key_test_failure_tracebacks.value] = os.environ[
+            _ENV_TEST_FAILURE_TRACEBACKS
+        ]
 
     # TODO: See if there is a better way to do this: b/29836695
     config_path, _ = os.path.split(utils.abs_path(test_config_path))
     configs[keys.Config.key_config_path.value] = config_path
     _validate_test_config(configs)
-    _validate_testbed_configs(testbeds, config_path)
+    _validate_testbed_configs(testbeds)
     # Unpack testbeds into separate json objects.
     configs.pop(keys.Config.key_testbed.value)
     test_run_configs = []
 
     for _, testbed in testbeds.items():
         test_run_config = mobly_config_parser.TestRunConfig()
-        test_run_config.testbed_name = testbed[
-            keys.Config.key_testbed_name.value]
+        test_run_config.testbed_name = testbed[keys.Config.key_testbed_name.value]
         test_run_config.controller_configs = testbed
         test_run_config.controller_configs[
-            keys.Config.key_test_paths.value] = configs.get(
-                keys.Config.key_test_paths.value, None)
-        test_run_config.log_path = configs.get(keys.Config.key_log_path.value,
-                                               None)
+            keys.Config.key_test_paths.value
+        ] = configs.get(keys.Config.key_test_paths.value, None)
+        test_run_config.log_path = configs.get(keys.Config.key_log_path.value, None)
         if test_run_config.log_path is not None:
             test_run_config.log_path = utils.abs_path(test_run_config.log_path)
 
@@ -266,13 +237,13 @@
     Returns:
         A list of strings, each is a test specifier.
     """
-    with open(fpath, 'r') as f:
+    with open(fpath, "r") as f:
         tf = []
         for line in f:
             line = line.strip()
             if not line:
                 continue
-            if len(tf) and (tf[-1].endswith(':') or tf[-1].endswith(',')):
+            if len(tf) and (tf[-1].endswith(":") or tf[-1].endswith(",")):
                 tf[-1] += line
             else:
                 tf.append(line)
diff --git a/src/antlion/context.py b/src/antlion/context.py
index 5fe1417..cfe9df8 100644
--- a/src/antlion/context.py
+++ b/src/antlion/context.py
@@ -48,7 +48,7 @@
     """
     if depth is None:
         return _contexts[-1]
-    return _contexts[min(depth, len(_contexts)-1)]
+    return _contexts[min(depth, len(_contexts) - 1)]
 
 
 def get_context_for_event(event):
@@ -67,7 +67,7 @@
         return _get_context_for_test_case_event(event)
     if isinstance(event, TestClassEvent):
         return _get_context_for_test_class_event(event)
-    raise TypeError('Unrecognized event type: %s %s', event, event.__class__)
+    raise TypeError("Unrecognized event type: %s %s", event, event.__class__)
 
 
 def _get_context_for_test_case_event(event):
@@ -180,7 +180,7 @@
         Args:
             log_name: The name of the logger.
             base_output_path: The base path of output files for this logger.
-            """
+        """
         cls._base_output_paths[log_name] = base_output_path
 
     def get_subcontext(self, log_name=None):
@@ -198,7 +198,7 @@
         Returns:
             The output path.
         """
-        return self._subcontexts.get(log_name, '')
+        return self._subcontexts.get(log_name, "")
 
     @classmethod
     def add_subcontext(cls, log_name, subcontext):
@@ -225,9 +225,11 @@
             The output path.
         """
 
-        path = os.path.join(self.get_base_output_path(log_name),
-                            self._get_default_context_dir(),
-                            self.get_subcontext(log_name))
+        path = os.path.join(
+            self.get_base_output_path(log_name),
+            self._get_default_context_dir(),
+            self.get_subcontext(log_name),
+        )
         os.makedirs(path, exist_ok=True)
         return path
 
@@ -251,8 +253,9 @@
             return logging.log_path
         except AttributeError as e:
             raise EnvironmentError(
-                'The ACTS logger has not been set up and'
-                ' "base_output_path" has not been set.') from e
+                "The ACTS logger has not been set up and"
+                ' "base_output_path" has not been set.'
+            ) from e
 
     def _get_default_context_dir(self):
         """Gets the default output directory for this context."""
@@ -264,14 +267,14 @@
 
     @property
     def identifier(self):
-        return 'root'
+        return "root"
 
     def _get_default_context_dir(self):
         """Gets the default output directory for this context.
 
         Logs at the root level context are placed directly in the base level
         directory, so no context-level path exists."""
-        return ''
+        return ""
 
 
 class TestClassContext(TestContext):
@@ -336,7 +339,7 @@
 
     @property
     def identifier(self):
-        return '%s.%s' % (self.test_class_name, self.test_case_name)
+        return "%s.%s" % (self.test_class_name, self.test_case_name)
 
     def _get_default_context_dir(self):
         """Gets the default output directory for this context.
@@ -345,9 +348,7 @@
         by the name of the test case. This is in line with the ACTS logger
         itself.
         """
-        return os.path.join(
-            self.test_class_name,
-            self.test_case_name)
+        return os.path.join(self.test_class_name, self.test_case_name)
 
 
 # stack for keeping track of the current test context
diff --git a/src/antlion/controllers/__init__.py b/src/antlion/controllers/__init__.py
index 640393c..e69de29 100644
--- a/src/antlion/controllers/__init__.py
+++ b/src/antlion/controllers/__init__.py
@@ -1,31 +0,0 @@
-"""Modules under antlion.controllers provide interfaces to hardware/software
-resources that ACTS manages.
-
-Top level controllers module are controller modules that need to be explicitly
-specified by users in test configuration files. Top level controller modules
-should have the following module level functions:
-
-def create(configs, logger):
-    '''Instantiates the controller class with the input configs.
-    Args:
-        configs: A list of dicts each representing config for one controller
-            object.
-        logger: The main logger used in the current test run.
-    Returns:
-        A list of controller objects.
-
-def destroy(objs):
-    '''Destroys a list of controller objects created by the "create" function
-    and releases all the resources.
-
-    Args:
-        objs: A list of controller objects created from this module.
-    '''
-"""
-"""This is a list of all the top level controller modules"""
-__all__ = [
-    "android_device", "attenuator", "bluetooth_pts_device", "monsoon",
-    "access_point", "iperf_server", "packet_sender", "arduino_wifi_dongle",
-    "packet_capture", "fuchsia_device", "pdu", "openwrt_ap", "tigertail",
-    "asus_axe11000_ap"
-]
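The module docstring removed here documented the contract every top-level controller module must satisfy: module-level create and destroy functions. A minimal sketch of a hypothetical controller module following that contract, with placeholder names:

class MyWidget:
    """Placeholder controller object; not a real antlion controller."""

    def __init__(self, config, logger):
        self.config = config
        self.log = logger

    def close(self):
        """Release any resources held by this controller."""


def create(configs, logger):
    """Instantiates one controller object per config dict."""
    return [MyWidget(config, logger) for config in configs]


def destroy(objs):
    """Destroys controller objects created by create() and releases resources."""
    for obj in objs:
        obj.close()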
diff --git a/src/antlion/controllers/abstract_inst.py b/src/antlion/controllers/abstract_inst.py
deleted file mode 100644
index d55c3a5..0000000
--- a/src/antlion/controllers/abstract_inst.py
+++ /dev/null
@@ -1,243 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Python module for Abstract Instrument Library."""
-
-import socket
-import requests
-from antlion import logger
-
-
-class SocketInstrumentError(Exception):
-    """Abstract Instrument Error Class, via Socket and SCPI."""
-
-    def __init__(self, error, command=None):
-        """Init method for Socket Instrument Error.
-
-        Args:
-            error: Exception error.
-            command: Additional information on command,
-                Type, Str.
-        """
-        super(SocketInstrumentError, self).__init__(error)
-        self._error_code = error
-        self._error_message = self._error_code
-        if command is not None:
-            self._error_message = 'Command {} returned the error: {}.'.format(
-                repr(command), repr(self._error_message))
-
-    def __str__(self):
-        return self._error_message
-
-
-class SocketInstrument(object):
-    """Abstract Instrument Class, via Socket and SCPI."""
-
-    def __init__(self, ip_addr, ip_port):
-        """Init method for Socket Instrument.
-
-        Args:
-            ip_addr: IP Address.
-                Type, str.
-            ip_port: TCPIP Port.
-                Type, str.
-        """
-        self._socket_timeout = 120
-        self._socket_buffer_size = 1024
-
-        self._ip_addr = ip_addr
-        self._ip_port = ip_port
-
-        self._escseq = '\n'
-        self._codefmt = 'utf-8'
-
-        self._logger = logger.create_tagged_trace_logger(
-            '%s:%s' % (self._ip_addr, self._ip_port))
-
-        self._socket = None
-
-    def _connect_socket(self):
-        """Init and Connect to socket."""
-        try:
-            self._socket = socket.create_connection(
-                (self._ip_addr, self._ip_port), timeout=self._socket_timeout)
-
-            infmsg = 'Opened Socket connection to {}:{} with handle {}.'.format(
-                repr(self._ip_addr), repr(self._ip_port), repr(self._socket))
-            self._logger.debug(infmsg)
-
-        except socket.timeout:
-            errmsg = 'Socket timeout while connecting to instrument.'
-            self._logger.exception(errmsg)
-            raise SocketInstrumentError(errmsg)
-
-        except socket.error:
-            errmsg = 'Socket error while connecting to instrument.'
-            self._logger.exception(errmsg)
-            raise SocketInstrumentError(errmsg)
-
-    def _send(self, cmd):
-        """Send command via Socket.
-
-        Args:
-            cmd: Command to send,
-                Type, Str.
-        """
-        if not self._socket:
-            self._logger.warning('Socket instrument is not connected')
-            self._connect_socket()
-
-        cmd_es = cmd + self._escseq
-
-        try:
-            self._socket.sendall(cmd_es.encode(self._codefmt))
-            self._logger.debug('Sent %r to %r:%r.', cmd, self._ip_addr,
-                               self._ip_port)
-
-        except socket.timeout:
-            errmsg = ('Socket timeout while sending command {} '
-                      'to instrument.').format(repr(cmd))
-            self._logger.exception(errmsg)
-            raise SocketInstrumentError(errmsg)
-
-        except socket.error:
-            errmsg = ('Socket error while sending command {} '
-                      'to instrument.').format(repr(cmd))
-            self._logger.exception(errmsg)
-            raise SocketInstrumentError(errmsg)
-
-        except Exception as err:
-            errmsg = ('Error {} while sending command {} '
-                      'to instrument.').format(repr(cmd), repr(err))
-            self._logger.exception(errmsg)
-            raise
-
-    def _recv(self):
-        """Receive response via Socket.
-
-        Returns:
-            resp: Response from Instrument via Socket,
-                Type, Str.
-        """
-        if not self._socket:
-            self._logger.warning('Socket instrument is not connected')
-            self._connect_socket()
-
-        resp = ''
-
-        try:
-            while True:
-                resp_tmp = self._socket.recv(self._socket_buffer_size)
-                resp_tmp = resp_tmp.decode(self._codefmt)
-                resp += resp_tmp
-                if len(resp_tmp) < self._socket_buffer_size:
-                    break
-
-        except socket.timeout:
-            errmsg = 'Socket timeout while receiving response from instrument.'
-            self._logger.exception(errmsg)
-            raise SocketInstrumentError(errmsg)
-
-        except socket.error:
-            errmsg = 'Socket error while receiving response from instrument.'
-            self._logger.exception(errmsg)
-            raise SocketInstrumentError(errmsg)
-
-        except Exception as err:
-            errmsg = ('Error {} while receiving response '
-                      'from instrument').format(repr(err))
-            self._logger.exception(errmsg)
-            raise
-
-        resp = resp.rstrip(self._escseq)
-
-        self._logger.debug('Received %r from %r:%r.', resp, self._ip_addr,
-                           self._ip_port)
-
-        return resp
-
-    def _close_socket(self):
-        """Close Socket Instrument."""
-        if not self._socket:
-            return
-
-        try:
-            self._socket.shutdown(socket.SHUT_RDWR)
-            self._socket.close()
-            self._socket = None
-            self._logger.debug('Closed Socket Instrument %r:%r.',
-                               self._ip_addr, self._ip_port)
-
-        except Exception as err:
-            errmsg = 'Error {} while closing instrument.'.format(repr(err))
-            self._logger.exception(errmsg)
-            raise
-
-    def _query(self, cmd):
-        """query instrument via Socket.
-
-        Args:
-            cmd: Command to send,
-                Type, Str.
-
-        Returns:
-            resp: Response from Instrument via Socket,
-                Type, Str.
-        """
-        self._send(cmd + ';*OPC?')
-        resp = self._recv()
-        return resp
-
-
-class RequestInstrument(object):
-    """Abstract Instrument Class, via Request."""
-
-    def __init__(self, ip_addr):
-        """Init method for request instrument.
-
-        Args:
-            ip_addr: IP Address.
-                Type, Str.
-        """
-        self._request_timeout = 120
-        self._request_protocol = 'http'
-        self._ip_addr = ip_addr
-        self._escseq = '\r\n'
-
-        self._logger = logger.create_tagged_trace_logger(self._ip_addr)
-
-    def _query(self, cmd):
-        """query instrument via request.
-
-        Args:
-            cmd: Command to send,
-                Type, Str.
-
-        Returns:
-            resp: Response from Instrument via request,
-                Type, Str.
-        """
-        request_cmd = '{}://{}/{}'.format(self._request_protocol,
-                                          self._ip_addr, cmd)
-        resp_raw = requests.get(request_cmd, timeout=self._request_timeout)
-
-        resp = resp_raw.text
-        for char_del in self._escseq:
-            resp = resp.replace(char_del, '')
-
-        self._logger.debug('Sent %r to %r, and get %r.', cmd, self._ip_addr,
-                           resp)
-
-        return resp
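For context on what this deletion removes: SocketInstrument wrapped a raw SCPI-over-TCP connection, and _query appended ";*OPC?" to every command so the call blocked until the instrument finished. A sketch of how it was typically consumed, with a hypothetical subclass and commands:

class MySignalGenerator(SocketInstrument):
    """Hypothetical instrument driver built on the removed base class."""

    def __init__(self, ip_addr, ip_port):
        super().__init__(ip_addr, ip_port)
        self._connect_socket()

    def get_idn(self):
        # _query sends "*IDN?;*OPC?" and returns the decoded response.
        return self._query("*IDN?")

    def set_frequency_hz(self, freq):
        # Fire-and-forget command; no response expected. "FREQ" is illustrative.
        self._send(f"FREQ {freq}")


generator = MySignalGenerator("192.168.1.50", "5025")
print(generator.get_idn())
generator.set_frequency_hz(2_412_000_000)
generator._close_socket()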
diff --git a/src/antlion/controllers/access_point.py b/src/antlion/controllers/access_point.py
index d9116ce..91a241d 100755
--- a/src/antlion/controllers/access_point.py
+++ b/src/antlion/controllers/access_point.py
@@ -14,188 +14,60 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import collections
 import ipaddress
-import os
 import time
 
-from typing import FrozenSet, Set, TYPE_CHECKING
+from dataclasses import dataclass
+from typing import Any, Dict, FrozenSet, List, Optional, Set, Tuple
 
 from antlion import logger
 from antlion import utils
-from antlion.controllers import pdu
-from antlion.controllers.ap_lib import ap_get_interface
-from antlion.controllers.ap_lib import ap_iwconfig
-from antlion.controllers.ap_lib import bridge_interface
-from antlion.controllers.ap_lib import dhcp_config
-from antlion.controllers.ap_lib import dhcp_server
-from antlion.controllers.ap_lib import hostapd
-from antlion.controllers.ap_lib import hostapd_ap_preset
-from antlion.controllers.ap_lib import hostapd_config
+from antlion.capabilities.ssh import SSHConfig, SSHProvider
 from antlion.controllers.ap_lib import hostapd_constants
-from antlion.controllers.ap_lib import radvd
-from antlion.controllers.ap_lib import radvd_config
+from antlion.controllers.ap_lib.ap_get_interface import ApInterfaces
+from antlion.controllers.ap_lib.ap_iwconfig import ApIwconfig
+from antlion.controllers.ap_lib.bridge_interface import BridgeInterface
+from antlion.controllers.ap_lib.dhcp_config import DhcpConfig, Subnet
+from antlion.controllers.ap_lib.dhcp_server import DhcpServer, NoInterfaceError
 from antlion.controllers.ap_lib.extended_capabilities import ExtendedCapabilities
-from antlion.controllers.ap_lib.wireless_network_management import BssTransitionManagementRequest
+from antlion.controllers.ap_lib.hostapd import Hostapd
+from antlion.controllers.ap_lib.hostapd_ap_preset import create_ap_preset
+from antlion.controllers.ap_lib.hostapd_config import HostapdConfig
+from antlion.controllers.ap_lib.hostapd_security import Security
+from antlion.controllers.ap_lib.radvd import Radvd
+from antlion.controllers.ap_lib.radvd_config import RadvdConfig
+from antlion.controllers.ap_lib.wireless_network_management import (
+    BssTransitionManagementRequest,
+)
+from antlion.controllers.pdu import PduDevice, get_pdu_port_for_device
 from antlion.controllers.utils_lib.commands import ip
 from antlion.controllers.utils_lib.commands import route
 from antlion.controllers.utils_lib.ssh import connection
 from antlion.controllers.utils_lib.ssh import settings
 from antlion.libs.proc import job
 
-if TYPE_CHECKING:
-    from antlion.controllers.ap_lib.radvd import Radvd
-
-MOBLY_CONTROLLER_CONFIG_NAME = 'AccessPoint'
-ACTS_CONTROLLER_REFERENCE_NAME = 'access_points'
-_BRCTL = 'brctl'
-
-LIFETIME = 180
-PROC_NET_SNMP6 = '/proc/net/snmp6'
-SCAPY_INSTALL_COMMAND = 'sudo python setup.py install'
-RA_MULTICAST_ADDR = '33:33:00:00:00:01'
-RA_SCRIPT = 'sendra.py'
-
-
-def create(configs):
-    """Creates ap controllers from a json config.
-
-    Creates an ap controller from either a list, or a single
-    element. The element can either be just the hostname or a dictionary
-    containing the hostname and username of the ap to connect to over ssh.
-
-    Args:
-        The json configs that represent this controller.
-
-    Returns:
-        A new AccessPoint.
-    """
-    return [AccessPoint(c) for c in configs]
-
-
-def destroy(aps):
-    """Destroys a list of access points.
-
-    Args:
-        aps: The list of access points to destroy.
-    """
-    for ap in aps:
-        ap.close()
-
-
-def get_info(aps):
-    """Get information on a list of access points.
-
-    Args:
-        aps: A list of AccessPoints.
-
-    Returns:
-        A list of all aps hostname.
-    """
-    return [ap.ssh_settings.hostname for ap in aps]
-
-
-def setup_ap(
-        access_point,
-        profile_name,
-        channel,
-        ssid,
-        mode=None,
-        preamble=None,
-        beacon_interval=None,
-        dtim_period=None,
-        frag_threshold=None,
-        rts_threshold=None,
-        force_wmm=None,
-        hidden=False,
-        security=None,
-        pmf_support=None,
-        additional_ap_parameters=None,
-        password=None,
-        n_capabilities=None,
-        ac_capabilities=None,
-        vht_bandwidth=None,
-        wnm_features: FrozenSet[hostapd_constants.WnmFeature] = frozenset(),
-        setup_bridge=False,
-        is_ipv6_enabled=False,
-        is_nat_enabled=True):
-    """Creates a hostapd profile and runs it on an ap. This is a convenience
-    function that allows us to start an ap with a single function, without first
-    creating a hostapd config.
-
-    Args:
-        access_point: An ACTS access_point controller
-        profile_name: The profile name of one of the hostapd ap presets.
-        channel: What channel to set the AP to.
-        preamble: Whether to set short or long preamble (True or False)
-        beacon_interval: The beacon interval (int)
-        dtim_period: Length of dtim period (int)
-        frag_threshold: Fragmentation threshold (int)
-        rts_threshold: RTS threshold (int)
-        force_wmm: Enable WMM or not (True or False)
-        hidden: Advertise the SSID or not (True or False)
-        security: What security to enable.
-        pmf_support: int, whether pmf is not disabled, enabled, or required
-        additional_ap_parameters: Additional parameters to send the AP.
-        password: Password to connect to WLAN if necessary.
-        check_connectivity: Whether to check for internet connectivity.
-        wnm_features: WNM features to enable on the AP.
-        setup_bridge: Whether to bridge the LAN interface WLAN interface.
-            Only one WLAN interface can be bridged with the LAN interface
-            and none of the guest networks can be bridged.
-        is_ipv6_enabled: If True, start a IPv6 router advertisement daemon
-        is_nat_enabled: If True, start NAT on the AP to allow the DUT to be able
-            to access the internet if the WAN port is connected to the internet.
-
-    Returns:
-        An identifier for each ssid being started. These identifiers can be
-        used later by this controller to control the ap.
-
-    Raises:
-        Error: When the ap can't be brought up.
-    """
-    ap = hostapd_ap_preset.create_ap_preset(profile_name=profile_name,
-                                            iface_wlan_2g=access_point.wlan_2g,
-                                            iface_wlan_5g=access_point.wlan_5g,
-                                            channel=channel,
-                                            ssid=ssid,
-                                            mode=mode,
-                                            short_preamble=preamble,
-                                            beacon_interval=beacon_interval,
-                                            dtim_period=dtim_period,
-                                            frag_threshold=frag_threshold,
-                                            rts_threshold=rts_threshold,
-                                            force_wmm=force_wmm,
-                                            hidden=hidden,
-                                            bss_settings=[],
-                                            security=security,
-                                            pmf_support=pmf_support,
-                                            n_capabilities=n_capabilities,
-                                            ac_capabilities=ac_capabilities,
-                                            vht_bandwidth=vht_bandwidth,
-                                            wnm_features=wnm_features)
-    return access_point.start_ap(
-        hostapd_config=ap,
-        radvd_config=radvd_config.RadvdConfig() if is_ipv6_enabled else None,
-        setup_bridge=setup_bridge,
-        is_nat_enabled=is_nat_enabled,
-        additional_parameters=additional_ap_parameters)
+MOBLY_CONTROLLER_CONFIG_NAME = "AccessPoint"
+ACTS_CONTROLLER_REFERENCE_NAME = "access_points"
 
 
 class Error(Exception):
     """Error raised when there is a problem with the access point."""
 
 
-_ApInstance = collections.namedtuple('_ApInstance', ['hostapd', 'subnet'])
+@dataclass
+class _ApInstance:
+    hostapd: Hostapd
+    subnet: Subnet
+
 
 # These ranges were split this way since each physical radio can have up
 # to 8 SSIDs so for the 2GHz radio the DHCP range will be
 # 192.168.1 - 8 and the 5Ghz radio will be 192.168.9 - 16
-_AP_2GHZ_SUBNET_STR_DEFAULT = '192.168.1.0/24'
-_AP_5GHZ_SUBNET_STR_DEFAULT = '192.168.9.0/24'
+_AP_2GHZ_SUBNET_STR_DEFAULT = "192.168.1.0/24"
+_AP_5GHZ_SUBNET_STR_DEFAULT = "192.168.9.0/24"
 
 # The last digit of the ip for the bridge interface
-BRIDGE_IP_LAST = '100'
+BRIDGE_IP_LAST = "100"
 
 
 class AccessPoint(object):
@@ -207,48 +79,58 @@
         dhcp_settings: The dhcp server settings being used.
     """
 
-    def __init__(self, configs):
+    def __init__(self, configs: Dict[str, Any]) -> None:
         """
         Args:
             configs: configs for the access point from config file.
         """
-        self.ssh_settings = settings.from_config(configs['ssh_config'])
+        self.ssh_settings = settings.from_config(configs["ssh_config"])
         self.log = logger.create_logger(
-            lambda msg: f'[Access Point|{self.ssh_settings.hostname}] {msg}')
-        self.device_pdu_config = configs.get('PduDevice', None)
+            lambda msg: f"[Access Point|{self.ssh_settings.hostname}] {msg}"
+        )
+        self.device_pdu_config = configs.get("PduDevice", None)
         self.identifier = self.ssh_settings.hostname
 
-        if 'ap_subnet' in configs:
-            self._AP_2G_SUBNET_STR = configs['ap_subnet']['2g']
-            self._AP_5G_SUBNET_STR = configs['ap_subnet']['5g']
+        if "ap_subnet" in configs:
+            self._AP_2G_SUBNET_STR: str = configs["ap_subnet"]["2g"]
+            self._AP_5G_SUBNET_STR: str = configs["ap_subnet"]["5g"]
         else:
             self._AP_2G_SUBNET_STR = _AP_2GHZ_SUBNET_STR_DEFAULT
             self._AP_5G_SUBNET_STR = _AP_5GHZ_SUBNET_STR_DEFAULT
 
-        self._AP_2G_SUBNET = dhcp_config.Subnet(
-            ipaddress.ip_network(self._AP_2G_SUBNET_STR))
-        self._AP_5G_SUBNET = dhcp_config.Subnet(
-            ipaddress.ip_network(self._AP_5G_SUBNET_STR))
+        self._AP_2G_SUBNET = Subnet(ipaddress.ip_network(self._AP_2G_SUBNET_STR))
+        self._AP_5G_SUBNET = Subnet(ipaddress.ip_network(self._AP_5G_SUBNET_STR))
 
         self.ssh = connection.SshConnection(self.ssh_settings)
 
+        # TODO(http://b/278758876): Replace self.ssh with self.ssh_provider
+        self.ssh_provider = SSHProvider(
+            SSHConfig(
+                self.ssh_settings.username,
+                self.ssh_settings.hostname,
+                self.ssh_settings.identity_file,
+                port=self.ssh_settings.port,
+                ssh_binary=self.ssh_settings.executable,
+                connect_timeout=90,
+            )
+        )
+
         # Singleton utilities for running various commands.
         self._ip_cmd = ip.LinuxIpCommand(self.ssh)
         self._route_cmd = route.LinuxRouteCommand(self.ssh)
 
         # A map from network interface name to _ApInstance objects representing
         # the hostapd instance running against the interface.
-        self._aps = dict()
-        self._dhcp = None
-        self._dhcp_bss = dict()
-        self._radvd: Radvd = None
-        self.bridge = bridge_interface.BridgeInterface(self)
-        self.iwconfig = ap_iwconfig.ApIwconfig(self)
+        self._aps: Dict[str, _ApInstance] = dict()
+        self._dhcp: Optional[DhcpServer] = None
+        self._dhcp_bss: Dict[Any, Subnet] = dict()
+        self._radvd: Optional[Radvd] = None
+        self.bridge = BridgeInterface(self)
+        self.iwconfig = ApIwconfig(self)
 
         # Check to see if wan_interface is specified in acts_config for tests
         # isolated from the internet and set this override.
-        self.interfaces = ap_get_interface.ApInterfaces(
-            self, configs.get('wan_interface'))
+        self.interfaces = ApInterfaces(self, configs.get("wan_interface"))
 
         # Get needed interface names and initialize the unnecessary ones.
         self.wan = self.interfaces.get_wan_interface()
@@ -257,10 +139,9 @@
         self.wlan_5g = self.wlan[1]
         self.lan = self.interfaces.get_lan_interface()
         self._initial_ap()
-        self.scapy_install_path = None
         self.setup_bridge = False
 
-    def _initial_ap(self):
+    def _initial_ap(self) -> None:
         """Initial AP interfaces.
 
         Bring down hostapd if instance is running, bring down all bridge
@@ -271,32 +152,34 @@
         # interfaces need to be brought down as part of the AP initialization
         # process, otherwise test would fail.
         try:
-            self.ssh.run('stop wpasupplicant')
+            self.ssh.run("stop wpasupplicant")
         except job.Error:
-            self.log.info('No wpasupplicant running')
+            self.log.info("No wpasupplicant running")
         try:
-            self.ssh.run('stop hostapd')
+            self.ssh.run("stop hostapd")
         except job.Error:
-            self.log.info('No hostapd running')
+            self.log.info("No hostapd running")
         # Bring down all wireless interfaces
         for iface in self.wlan:
-            WLAN_DOWN = f'ip link set {iface} down'
+            WLAN_DOWN = f"ip link set {iface} down"
             self.ssh.run(WLAN_DOWN)
         # Bring down all bridge interfaces
         bridge_interfaces = self.interfaces.get_bridge_interface()
         if bridge_interfaces:
             for iface in bridge_interfaces:
-                BRIDGE_DOWN = f'ip link set {iface} down'
-                BRIDGE_DEL = f'brctl delbr {iface}'
+                BRIDGE_DOWN = f"ip link set {iface} down"
+                BRIDGE_DEL = f"brctl delbr {iface}"
                 self.ssh.run(BRIDGE_DOWN)
                 self.ssh.run(BRIDGE_DEL)
 
-    def start_ap(self,
-                 hostapd_config: hostapd_config.HostapdConfig,
-                 radvd_config=None,
-                 setup_bridge=False,
-                 is_nat_enabled=True,
-                 additional_parameters=None):
+    def start_ap(
+        self,
+        hostapd_config: HostapdConfig,
+        radvd_config: Optional[RadvdConfig] = None,
+        setup_bridge: bool = False,
+        is_nat_enabled: bool = True,
+        additional_parameters: Optional[Dict[str, Any]] = None,
+    ) -> List[Any]:
         """Starts as an ap using a set of configurations.
 
         This will start an ap on this host. To start an ap the controller
@@ -306,19 +189,17 @@
         for that subnet for any device that connects through that interface.
 
         Args:
-            hostapd_config: hostapd_config.HostapdConfig, The configurations
-                to use when starting up the ap.
-            radvd_config: radvd_config.RadvdConfig, The IPv6 configuration
-                to use when starting up the ap.
+            hostapd_config: The configurations to use when starting up the ap.
+            radvd_config: The IPv6 configuration to use when starting up the ap.
             setup_bridge: Whether to bridge the LAN interface WLAN interface.
                 Only one WLAN interface can be bridged with the LAN interface
                 and none of the guest networks can be bridged.
             is_nat_enabled: If True, start NAT on the AP to allow the DUT to be
                 able to access the internet if the WAN port is connected to the
                 internet.
-            additional_parameters: A dictionary of parameters that can sent
-                directly into the hostapd config file.  This can be used for
-                debugging and or adding one off parameters into the config.
+            additional_parameters: Parameters that can be sent directly into
+                the hostapd config file. This can be used for debugging and/or
+                adding one-off parameters into the config.
 
         Returns:
             An identifier for each ssid being started. These identifiers can be
@@ -336,13 +217,13 @@
 
         # radvd requires the interface to have a IPv6 link-local address.
         if radvd_config:
-            self.ssh.run(f'sysctl -w net.ipv6.conf.{interface}.disable_ipv6=0')
-            self.ssh.run(f'sysctl -w net.ipv6.conf.{interface}.forwarding=1')
+            self.ssh.run(f"sysctl -w net.ipv6.conf.{interface}.disable_ipv6=0")
+            self.ssh.run(f"sysctl -w net.ipv6.conf.{interface}.forwarding=1")
 
         # In order to handle dhcp servers on any interface, the initiation of
         # the dhcp server must be done after the wlan interfaces are figured
         # out as opposed to being in __init__
-        self._dhcp = dhcp_server.DhcpServer(self.ssh, interface=interface)
+        self._dhcp = DhcpServer(self.ssh, interface=interface)
 
         # For multi bssid configurations the mac address
         # of the wireless interface needs to have enough space to mask out
@@ -352,16 +233,18 @@
         cmd = f"ip link show {interface}|grep ether|awk -F' ' '{{print $2}}'"
         interface_mac_orig = self.ssh.run(cmd)
         if interface == self.wlan_5g:
-            hostapd_config.bssid = interface_mac_orig.stdout[:-1] + '0'
+            hostapd_config.bssid = interface_mac_orig.stdout[:-1] + "0"
             last_octet = 1
         if interface == self.wlan_2g:
-            hostapd_config.bssid = interface_mac_orig.stdout[:-1] + '8'
+            hostapd_config.bssid = interface_mac_orig.stdout[:-1] + "8"
             last_octet = 9
         if interface in self._aps:
-            raise ValueError('No WiFi interface available for AP on '
-                             f'channel {hostapd_config.channel}')
+            raise ValueError(
+                "No WiFi interface available for AP on "
+                f"channel {hostapd_config.channel}"
+            )
 
-        apd = hostapd.Hostapd(self.ssh, interface)
+        apd = Hostapd(self.ssh, interface)
         new_instance = _ApInstance(hostapd=apd, subnet=subnet)
         self._aps[interface] = new_instance
 
@@ -374,7 +257,7 @@
         # on the AP, but not for traffic handled by the Linux networking stack
         # such as ping.
         if radvd_config:
-            self._route_cmd.add_route(interface, 'fe80::/64')
+            self._route_cmd.add_route(interface, "fe80::/64")
 
         self._dhcp_bss = dict()
         if hostapd_config.bss_lookup:
@@ -390,15 +273,17 @@
             for bss in hostapd_config.bss_lookup:
                 if interface_mac_orig:
                     hostapd_config.bss_lookup[bss].bssid = (
-                        interface_mac_orig.stdout[:-1] + hex(last_octet)[-1:])
+                        interface_mac_orig.stdout[:-1] + hex(last_octet)[-1:]
+                    )
                 self._route_cmd.clear_routes(net_interface=str(bss))
                 if interface is self.wlan_2g:
                     starting_ip_range = self._AP_2G_SUBNET_STR
                 else:
                     starting_ip_range = self._AP_5G_SUBNET_STR
-                a, b, c, d = starting_ip_range.split('.')
-                self._dhcp_bss[bss] = dhcp_config.Subnet(
-                    ipaddress.ip_network(f'{a}.{b}.{int(c) + counter}.{d}'))
+                a, b, c, d = starting_ip_range.split(".")
+                self._dhcp_bss[bss] = Subnet(
+                    ipaddress.ip_network(f"{a}.{b}.{int(c) + counter}.{d}")
+                )
                 counter = counter + 1
                 last_octet = last_octet + 1
 
@@ -407,9 +292,10 @@
         # The DHCP serer requires interfaces to have ips and routes before
         # the server will come up.
         interface_ip = ipaddress.ip_interface(
-            f'{subnet.router}/{subnet.network.netmask}')
+            f"{subnet.router}/{subnet.network.netmask}"
+        )
         if setup_bridge is True:
-            bridge_interface_name = 'eth_test'
+            bridge_interface_name = "eth_test"
             self.create_bridge(bridge_interface_name, [interface, self.lan])
             self._ip_cmd.set_ipv4_address(bridge_interface_name, interface_ip)
         else:
@@ -421,13 +307,13 @@
             # variables represent the interface name, k, and dhcp info, v.
             for k, v in self._dhcp_bss.items():
                 bss_interface_ip = ipaddress.ip_interface(
-                    f'{self._dhcp_bss[k].router}/{self._dhcp_bss[k].network.netmask}'
+                    f"{self._dhcp_bss[k].router}/{self._dhcp_bss[k].network.netmask}"
                 )
                 self._ip_cmd.set_ipv4_address(str(k), bss_interface_ip)
 
         # Restart the DHCP server with our updated list of subnets.
         configured_subnets = self.get_configured_subnets()
-        dhcp_conf = dhcp_config.DhcpConfig(subnets=configured_subnets)
+        dhcp_conf = DhcpConfig(subnets=configured_subnets)
         self.start_dhcp(dhcp_conf=dhcp_conf)
         if is_nat_enabled:
             self.start_nat()
@@ -437,7 +323,7 @@
             self.enable_forwarding()
         if radvd_config:
             radvd_interface = bridge_interface_name if setup_bridge else interface
-            self._radvd = radvd.Radvd(self.ssh, radvd_interface)
+            self._radvd = Radvd(self.ssh, radvd_interface)
             self._radvd.start(radvd_config)
         else:
             self._radvd = None
@@ -447,40 +333,40 @@
 
         return bss_interfaces
 
-    def get_configured_subnets(self):
+    def get_configured_subnets(self) -> List[Subnet]:
         """Get the list of configured subnets on the access point.
 
         This allows consumers of the access point objects create custom DHCP
         configs with the correct subnets.
 
-        Returns: a list of dhcp_config.Subnet objects
+        Returns: a list of Subnet objects
         """
         configured_subnets = [x.subnet for x in self._aps.values()]
         for k, v in self._dhcp_bss.items():
             configured_subnets.append(v)
         return configured_subnets
 
-    def start_dhcp(self, dhcp_conf):
+    def start_dhcp(self, dhcp_conf: DhcpConfig) -> None:
         """Start a DHCP server for the specified subnets.
 
         This allows consumers of the access point objects to control DHCP.
 
         Args:
-            dhcp_conf: A dhcp_config.DhcpConfig object.
+            dhcp_conf: A DhcpConfig object.
 
         Raises:
             Error: Raised when a dhcp server error is found.
         """
         self._dhcp.start(config=dhcp_conf)
 
-    def stop_dhcp(self):
+    def stop_dhcp(self) -> None:
         """Stop DHCP for this AP object.
 
         This allows consumers of the access point objects to control DHCP.
         """
         self._dhcp.stop()
 
-    def get_dhcp_logs(self):
+    def get_dhcp_logs(self) -> Optional[str]:
         """Get DHCP logs for this AP object.
 
         This allows consumers of the access point objects to validate DHCP
@@ -494,7 +380,7 @@
             return self._dhcp.get_logs()
         return None
 
-    def get_hostapd_logs(self):
+    def get_hostapd_logs(self) -> Dict[str, str]:
         """Get hostapd logs for all interfaces on AP object.
 
         This allows consumers of the access point objects to validate hostapd
@@ -504,11 +390,10 @@
         """
         hostapd_logs = dict()
         for identifier in self._aps:
-            hostapd_logs[identifier] = self._aps.get(
-                identifier).hostapd.pull_logs()
+            hostapd_logs[identifier] = self._aps.get(identifier).hostapd.pull_logs()
         return hostapd_logs
 
-    def get_radvd_logs(self):
+    def get_radvd_logs(self) -> Optional[str]:
         """Get radvd logs for this AP object.
 
         This allows consumers of the access point objects to validate radvd
@@ -522,16 +407,16 @@
             return self._radvd.pull_logs()
         return None
 
-    def enable_forwarding(self):
+    def enable_forwarding(self) -> None:
         """Enable IPv4 and IPv6 forwarding on the AP.
 
         When forwarding is enabled, the access point is able to route IP packets
         between devices in the same subnet.
         """
-        self.ssh.run('echo 1 > /proc/sys/net/ipv4/ip_forward')
-        self.ssh.run('echo 1 > /proc/sys/net/ipv6/conf/all/forwarding')
+        self.ssh.run("echo 1 > /proc/sys/net/ipv4/ip_forward")
+        self.ssh.run("echo 1 > /proc/sys/net/ipv6/conf/all/forwarding")
 
-    def start_nat(self):
+    def start_nat(self) -> None:
         """Start NAT on the AP.
 
         This allows consumers of the access point objects to enable NAT
@@ -544,11 +429,10 @@
         # the WAN and LAN/WLAN ports.  This means anyone connecting to the
         # WLAN/LAN ports will be able to access the internet if the WAN port
         # is connected to the internet.
-        self.ssh.run('iptables -t nat -F')
-        self.ssh.run(
-            f'iptables -t nat -A POSTROUTING -o {self.wan} -j MASQUERADE')
+        self.ssh.run("iptables -t nat -F")
+        self.ssh.run(f"iptables -t nat -A POSTROUTING -o {self.wan} -j MASQUERADE")
 
-    def stop_nat(self):
+    def stop_nat(self) -> None:
         """Stop NAT on the AP.
 
         This allows consumers of the access point objects to disable NAT on the
@@ -557,9 +441,9 @@
         Note that this is currently a global setting, since we don't have
         per-interface masquerade rules.
         """
-        self.ssh.run('iptables -t nat -F')
+        self.ssh.run("iptables -t nat -F")
 
-    def create_bridge(self, bridge_name, interfaces):
+    def create_bridge(self, bridge_name: str, interfaces: List[str]) -> None:
         """Create the specified bridge and bridge the specified interfaces.
 
         Args:
@@ -568,14 +452,14 @@
         """
 
         # Create the bridge interface
-        self.ssh.run(f'brctl addbr {bridge_name}')
+        self.ssh.run(f"brctl addbr {bridge_name}")
 
         for interface in interfaces:
-            self.ssh.run(f'brctl addif {bridge_name} {interface}')
+            self.ssh.run(f"brctl addif {bridge_name} {interface}")
 
-        self.ssh.run(f'ip link set {bridge_name} up')
+        self.ssh.run(f"ip link set {bridge_name} up")
 
-    def remove_bridge(self, bridge_name):
+    def remove_bridge(self, bridge_name: str) -> None:
         """Removes the specified bridge
 
         Args:
@@ -587,15 +471,15 @@
         #
         # Or if we're doing 2.4Ghz and 5Ghz SSIDs and we've already torn
         # down the bridge once, but we got called for each band.
-        result = self.ssh.run(f'brctl show {bridge_name}', ignore_status=True)
+        result = self.ssh.run(f"brctl show {bridge_name}", ignore_status=True)
 
         # If the bridge exists, we'll get an exit_status of 0, indicating
         # success, so we can continue and remove the bridge.
         if result.exit_status == 0:
-            self.ssh.run(f'ip link set {bridge_name} down')
-            self.ssh.run(f'brctl delbr {bridge_name}')
+            self.ssh.run(f"ip link set {bridge_name} down")
+            self.ssh.run(f"brctl delbr {bridge_name}")
 
-    def get_bssid_from_ssid(self, ssid, band):
+    def get_bssid_from_ssid(self, ssid: str, band: str) -> Optional[str]:
         """Gets the BSSID from a provided SSID
 
         Args:
@@ -611,8 +495,9 @@
         # Get the interface name associated with the given ssid.
         for interface in interfaces:
             iw_output = self.ssh.run(
-                f"iw dev {interface} info|grep ssid|awk -F' ' '{{print $2}}'")
-            if 'command failed: No such device' in iw_output.stderr:
+                f"iw dev {interface} info|grep ssid|awk -F' ' '{{print $2}}'"
+            )
+            if "command failed: No such device" in iw_output.stderr:
                 continue
             else:
                 # If the configured ssid is equal to the given ssid, we found
@@ -624,7 +509,7 @@
                     return iw_output.stdout
         return None
 
-    def stop_ap(self, identifier):
+    def stop_ap(self, identifier: str) -> None:
         """Stops a running ap on this controller.
 
         Args:
@@ -632,7 +517,7 @@
         """
 
         if identifier not in list(self._aps.keys()):
-            raise ValueError(f'Invalid identifier {identifier} given')
+            raise ValueError(f"Invalid identifier {identifier} given")
 
         instance = self._aps.get(identifier)
 
@@ -640,7 +525,7 @@
             self._radvd.stop()
         try:
             self.stop_dhcp()
-        except dhcp_server.NoInterfaceError:
+        except NoInterfaceError:
             pass
         self.stop_nat()
         instance.hostapd.stop()
@@ -650,18 +535,18 @@
         bridge_interfaces = self.interfaces.get_bridge_interface()
         if bridge_interfaces:
             for iface in bridge_interfaces:
-                BRIDGE_DOWN = f'ip link set {iface} down'
-                BRIDGE_DEL = f'brctl delbr {iface}'
+                BRIDGE_DOWN = f"ip link set {iface} down"
+                BRIDGE_DEL = f"brctl delbr {iface}"
                 self.ssh.run(BRIDGE_DOWN)
                 self.ssh.run(BRIDGE_DEL)
 
-    def stop_all_aps(self):
+    def stop_all_aps(self) -> None:
         """Stops all running aps on this device."""
 
         for ap in list(self._aps.keys()):
             self.stop_ap(ap)
 
-    def close(self):
+    def close(self) -> None:
         """Called to take down the entire access point.
 
         When called will stop all aps running on this host, shutdown the dhcp
@@ -672,7 +557,7 @@
             self.stop_all_aps()
         self.ssh.close()
 
-    def generate_bridge_configs(self, channel):
+    def generate_bridge_configs(self, channel: int) -> Tuple[str, Optional[str], str]:
         """Generate a list of configs for a bridge between LAN and WLAN.
 
         Args:
@@ -691,125 +576,65 @@
 
         iface_lan = self.lan
 
-        a, b, c, _ = subnet_str.strip('/24').split('.')
-        bridge_ip = f'{a}.{b}.{c}.{BRIDGE_IP_LAST}'
+        a, b, c, _ = subnet_str.strip("/24").split(".")
+        bridge_ip = f"{a}.{b}.{c}.{BRIDGE_IP_LAST}"
 
-        configs = (iface_wlan, iface_lan, bridge_ip)
+        return (iface_wlan, iface_lan, bridge_ip)
 
-        return configs
+    def ping(
+        self,
+        dest_ip: str,
+        count: int = 3,
+        interval: int = 1000,
+        timeout: int = 1000,
+        size: int = 56,
+        additional_ping_params: Optional[Any] = None,
+    ) -> Dict[str, Any]:
+        """Pings from AP to dest_ip, returns dict of ping stats (see utils.ping)"""
+        return utils.ping(
+            self.ssh,
+            dest_ip,
+            count=count,
+            interval=interval,
+            timeout=timeout,
+            size=size,
+            additional_ping_params=additional_ping_params,
+        )
 
-    def install_scapy(self, scapy_path, send_ra_path):
-        """Install scapy
-
-        Args:
-            scapy_path: path where scapy tar file is located on server
-            send_ra_path: path where sendra path is located on server
-        """
-        self.scapy_install_path = self.ssh.run('mktemp -d').stdout.rstrip()
-        self.log.info(f'Scapy install path: {self.scapy_install_path}')
-        self.ssh.send_file(scapy_path, self.scapy_install_path)
-        self.ssh.send_file(send_ra_path, self.scapy_install_path)
-
-        scapy = os.path.join(self.scapy_install_path,
-                             scapy_path.split('/')[-1])
-
-        self.ssh.run(f'tar -xvf {scapy} -C {self.scapy_install_path}')
-        self.ssh.run(f'cd {self.scapy_install_path}; {SCAPY_INSTALL_COMMAND}')
-
-    def cleanup_scapy(self):
-        """ Cleanup scapy """
-        if self.scapy_install_path:
-            cmd = f'rm -rf {self.scapy_install_path}'
-            self.log.info(f'Cleaning up scapy {cmd}')
-            output = self.ssh.run(cmd)
-            self.scapy_install_path = None
-
-    def send_ra(self,
-                iface,
-                mac=RA_MULTICAST_ADDR,
-                interval=1,
-                count=None,
-                lifetime=LIFETIME,
-                rtt=0):
-        """Invoke scapy and send RA to the device.
-
-        Args:
-          iface: string of the WiFi interface to use for sending packets.
-          mac: string HWAddr/MAC address to send the packets to.
-          interval: int Time to sleep between consecutive packets.
-          count: int Number of packets to be sent.
-          lifetime: int original RA's router lifetime in seconds.
-          rtt: retrans timer of the RA packet
-        """
-        scapy_command = os.path.join(self.scapy_install_path, RA_SCRIPT)
-        options = f' -m {mac} -i {interval} -c {count} -l {lifetime} -in {iface} -rtt {rtt}'
-        cmd = scapy_command + options
-        self.log.info(f'Scapy cmd: {cmd}')
-        self.ssh.run(cmd)
-
-    def get_icmp6intype134(self):
-        """Read the value of Icmp6InType134 and return integer.
-
-        Returns:
-            Integer value >0 if grep is successful; 0 otherwise.
-        """
-        ra_count_str = self.ssh.run(
-            f'grep Icmp6InType134 {PROC_NET_SNMP6} || true').stdout
-        if ra_count_str:
-            return int(ra_count_str.split()[1])
-
-    def ping(self,
-             dest_ip,
-             count=3,
-             interval=1000,
-             timeout=1000,
-             size=56,
-             additional_ping_params=None):
-        """Pings from AP to dest_ip, returns dict of ping stats (see utils.ping)
-        """
-        return utils.ping(self.ssh,
-                          dest_ip,
-                          count=count,
-                          interval=interval,
-                          timeout=timeout,
-                          size=size,
-                          additional_ping_params=additional_ping_params)
-
-    def can_ping(self,
-                 dest_ip,
-                 count=1,
-                 interval=1000,
-                 timeout=1000,
-                 size=56,
-                 additional_ping_params=None):
+    def can_ping(
+        self,
+        dest_ip: str,
+        count: int = 1,
+        interval: int = 1000,
+        timeout: int = 1000,
+        size: int = 56,
+        additional_ping_params: Optional[Any] = None,
+    ) -> bool:
         """Returns whether ap can ping dest_ip (see utils.can_ping)"""
-        return utils.can_ping(self.ssh,
-                              dest_ip,
-                              count=count,
-                              interval=interval,
-                              timeout=timeout,
-                              size=size,
-                              additional_ping_params=additional_ping_params)
+        return utils.can_ping(
+            self.ssh,
+            dest_ip,
+            count=count,
+            interval=interval,
+            timeout=timeout,
+            size=size,
+            additional_ping_params=additional_ping_params,
+        )
 
-    def hard_power_cycle(self,
-                         pdus,
-                         unreachable_timeout=30,
-                         ping_timeout=60,
-                         ssh_timeout=30,
-                         hostapd_configs=None):
+    def hard_power_cycle(
+        self,
+        pdus: List[PduDevice],
+        hostapd_configs: Optional[List[HostapdConfig]] = None,
+    ) -> None:
         """Kills, then restores power to AccessPoint, verifying it goes down and
         comes back online cleanly.
 
         Args:
-            pdus: list, PduDevices in the testbed
-            unreachable_timeout: int, time to wait for AccessPoint to become
-                unreachable
-            ping_timeout: int, time to wait for AccessPoint to responsd to pings
-            ssh_timeout: int, time to wait for AccessPoint to allow SSH
-            hostapd_configs (optional): list, containing hostapd settings. If
-                present, these networks will be spun up after the AP has
-                rebooted. This list can either contain HostapdConfig objects, or
-                    dictionaries with the start_ap params
+            pdus: PDUs in the testbed
+            hostapd_configs: Hostapd settings. If present, these networks will
+                be spun up after the AP has rebooted. This list can either
+                contain HostapdConfig objects, or dictionaries with the start_ap
+                params
                     (i.e  { 'hostapd_config': <HostapdConfig>,
                             'setup_bridge': <bool>,
                             'additional_parameters': <dict> } ).
@@ -818,132 +643,223 @@
             ConnectionError, if AccessPoint fails to go offline or come back.
         """
         if not self.device_pdu_config:
-            raise Error('No PduDevice provided in AccessPoint config.')
+            raise Error("No PduDevice provided in AccessPoint config.")
 
         if hostapd_configs is None:
             hostapd_configs = []
 
-        self.log.info(f'Power cycling')
-        ap_pdu, ap_pdu_port = pdu.get_pdu_port_for_device(
-            self.device_pdu_config, pdus)
+        self.log.info(f"Power cycling")
+        ap_pdu, ap_pdu_port = get_pdu_port_for_device(self.device_pdu_config, pdus)
 
-        self.log.info(f'Killing power')
+        self.log.info(f"Killing power")
         ap_pdu.off(str(ap_pdu_port))
 
-        self.log.info('Verifying AccessPoint is unreachable.')
-        timeout = time.time() + unreachable_timeout
-        while time.time() < timeout:
-            if not utils.can_ping(job, self.ssh_settings.hostname):
-                self.log.info('AccessPoint is unreachable as expected.')
-                break
-            else:
-                self.log.debug(
-                    'AccessPoint is still responding to pings. Retrying in 1 '
-                    'second.')
-                time.sleep(1)
-        else:
-            raise ConnectionError(
-                f'Failed to bring down AccessPoint ({self.ssh_settings.hostname})'
-            )
+        self.log.info("Verifying AccessPoint is unreachable.")
+        self.ssh_provider.wait_until_unreachable()
+        self.log.info("AccessPoint is unreachable as expected.")
+
         self._aps.clear()
 
-        self.log.info(f'Restoring power')
+        self.log.info(f"Restoring power")
         ap_pdu.on(str(ap_pdu_port))
 
-        self.log.info('Waiting for AccessPoint to respond to pings.')
-        timeout = time.time() + ping_timeout
-        while time.time() < timeout:
-            if utils.can_ping(job, self.ssh_settings.hostname):
-                self.log.info('AccessPoint responded to pings.')
-                break
-            else:
-                self.log.debug('AccessPoint is not responding to pings. '
-                               'Retrying in 1 second.')
-                time.sleep(1)
-        else:
-            raise ConnectionError(
-                f'Timed out waiting for AccessPoint ({self.ssh_settings.hostname}) '
-                'to respond to pings.')
-
-        self.log.info('Waiting for AccessPoint to allow ssh connection.')
-        timeout = time.time() + ssh_timeout
-        while time.time() < timeout:
-            try:
-                self.ssh.run('echo')
-            except connection.Error:
-                self.log.debug('AccessPoint is not allowing ssh connection. '
-                               'Retrying in 1 second.')
-                time.sleep(1)
-            else:
-                self.log.info('AccessPoint available via ssh.')
-                break
-        else:
-            raise ConnectionError(
-                f'Timed out waiting for AccessPoint ({self.ssh_settings.hostname}) '
-                'to allow ssh connection.')
+        self.log.info("Waiting for AccessPoint to become available via SSH.")
+        self.ssh_provider.wait_until_reachable()
+        self.log.info("AccessPoint responded to SSH.")
 
         # Allow 5 seconds for OS to finish getting set up
         time.sleep(5)
         self._initial_ap()
-        self.log.info('Power cycled successfully')
+        self.log.info("Power cycled successfully")
 
         for settings in hostapd_configs:
-            if type(settings) == hostapd_config.HostapdConfig:
+            if type(settings) == HostapdConfig:
                 config = settings
                 setup_bridge = False
                 additional_parameters = None
 
             elif type(settings) == dict:
-                config = settings['hostapd_config']
-                setup_bridge = settings.get('setup_bridge', False)
-                additional_parameters = settings.get('additional_parameters',
-                                                     None)
+                config = settings["hostapd_config"]
+                setup_bridge = settings.get("setup_bridge", False)
+                additional_parameters = settings.get("additional_parameters", None)
             else:
                 raise TypeError(
-                    'Items in hostapd_configs list must either be '
-                    'hostapd.HostapdConfig objects or dictionaries.')
+                    "Items in hostapd_configs list must either be "
+                    "HostapdConfig objects or dictionaries."
+                )
 
-            self.log.info(f'Restarting network {config.ssid}')
-            self.start_ap(config,
-                          setup_bridge=setup_bridge,
-                          additional_parameters=additional_parameters)
+            self.log.info(f"Restarting network {config.ssid}")
+            self.start_ap(
+                config,
+                setup_bridge=setup_bridge,
+                additional_parameters=additional_parameters,
+            )
 
-    def channel_switch(self, identifier, channel_num):
+    def channel_switch(self, identifier: str, channel_num: int) -> None:
         """Switch to a different channel on the given AP."""
         if identifier not in list(self._aps.keys()):
-            raise ValueError(f'Invalid identifier {identifier} given')
+            raise ValueError(f"Invalid identifier {identifier} given")
         instance = self._aps.get(identifier)
-        self.log.info(f'channel switch to channel {channel_num}')
+        self.log.info(f"channel switch to channel {channel_num}")
         instance.hostapd.channel_switch(channel_num)
 
-    def get_current_channel(self, identifier):
+    def get_current_channel(self, identifier: str) -> int:
         """Find the current channel on the given AP."""
         if identifier not in list(self._aps.keys()):
-            raise ValueError(f'Invalid identifier {identifier} given')
+            raise ValueError(f"Invalid identifier {identifier} given")
         instance = self._aps.get(identifier)
         return instance.hostapd.get_current_channel()
 
-    def get_stas(self, identifier) -> Set[str]:
+    def get_stas(self, identifier: str) -> Set[str]:
         """Return MAC addresses of all associated STAs on the given AP."""
         if identifier not in list(self._aps.keys()):
-            raise ValueError(f'Invalid identifier {identifier} given')
+            raise ValueError(f"Invalid identifier {identifier} given")
         instance = self._aps.get(identifier)
         return instance.hostapd.get_stas()
 
-    def get_sta_extended_capabilities(self, identifier,
-                                      sta_mac: str) -> ExtendedCapabilities:
+    def get_sta_extended_capabilities(
+        self, identifier: str, sta_mac: str
+    ) -> ExtendedCapabilities:
         """Get extended capabilities for the given STA, as seen by the AP."""
         if identifier not in list(self._aps.keys()):
-            raise ValueError(f'Invalid identifier {identifier} given')
+            raise ValueError(f"Invalid identifier {identifier} given")
         instance = self._aps.get(identifier)
         return instance.hostapd.get_sta_extended_capabilities(sta_mac)
 
     def send_bss_transition_management_req(
-            self, identifier, sta_mac: str,
-            request: BssTransitionManagementRequest):
+        self, identifier: str, sta_mac: str, request: BssTransitionManagementRequest
+    ) -> job.Result:
         """Send a BSS Transition Management request to an associated STA."""
         if identifier not in list(self._aps.keys()):
-            raise ValueError('Invalid identifier {identifier} given')
+            raise ValueError("Invalid identifier {identifier} given")
         instance = self._aps.get(identifier)
-        return instance.hostapd.send_bss_transition_management_req(
-            sta_mac, request)
+        return instance.hostapd.send_bss_transition_management_req(sta_mac, request)
+
+
+def setup_ap(
+    access_point: AccessPoint,
+    profile_name: str,
+    channel: int,
+    ssid: str,
+    mode: Optional[str] = None,
+    preamble: Optional[bool] = None,
+    beacon_interval: Optional[int] = None,
+    dtim_period: Optional[int] = None,
+    frag_threshold: Optional[int] = None,
+    rts_threshold: Optional[int] = None,
+    force_wmm: Optional[bool] = None,
+    hidden: Optional[bool] = False,
+    security: Optional[Security] = None,
+    pmf_support: Optional[int] = None,
+    additional_ap_parameters: Optional[Dict[str, Any]] = None,
+    password: Optional[str] = None,
+    n_capabilities: Optional[List[Any]] = None,
+    ac_capabilities: Optional[List[Any]] = None,
+    vht_bandwidth: Optional[int] = None,
+    wnm_features: FrozenSet[hostapd_constants.WnmFeature] = frozenset(),
+    setup_bridge: bool = False,
+    is_ipv6_enabled: bool = False,
+    is_nat_enabled: bool = True,
+) -> List[Any]:
+    """Creates a hostapd profile and runs it on an ap. This is a convenience
+    function that allows us to start an ap with a single function, without first
+    creating a hostapd config.
+
+    Args:
+        access_point: An ACTS access_point controller
+        profile_name: The profile name of one of the hostapd ap presets.
+        channel: What channel to set the AP to.
+        preamble: Whether to set short or long preamble
+        beacon_interval: The beacon interval
+        dtim_period: Length of dtim period
+        frag_threshold: Fragmentation threshold
+        rts_threshold: RTS threshold
+        force_wmm: Enable WMM or not
+        hidden: Advertise the SSID or not
+        security: What security to enable.
+        pmf_support: Whether pmf is not disabled, enabled, or required
+        additional_ap_parameters: Additional parameters to send the AP.
+        password: Password to connect to WLAN if necessary.
+        wnm_features: WNM features to enable on the AP.
+        setup_bridge: Whether to bridge the LAN interface with the WLAN
+            interface. Only one WLAN interface can be bridged with the LAN
+            interface and none of the guest networks can be bridged.
+        is_ipv6_enabled: If True, start an IPv6 router advertisement daemon
+        is_nat_enabled: If True, start NAT on the AP to allow the DUT to be able
+            to access the internet if the WAN port is connected to the internet.
+
+    Returns:
+        An identifier for each ssid being started. These identifiers can be
+        used later by this controller to control the ap.
+
+    Raises:
+        Error: When the ap can't be brought up.
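+
+    Example (illustrative only; the profile name, channel, and SSID below are
+    placeholders and must match the hostapd presets and band support of your
+    test bed):
+
+        identifier = setup_ap(
+            access_point=access_point,
+            profile_name="whirlwind",
+            channel=36,
+            ssid="test_network",
+        )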
+    """
+    ap = create_ap_preset(
+        profile_name=profile_name,
+        iface_wlan_2g=access_point.wlan_2g,
+        iface_wlan_5g=access_point.wlan_5g,
+        channel=channel,
+        ssid=ssid,
+        mode=mode,
+        short_preamble=preamble,
+        beacon_interval=beacon_interval,
+        dtim_period=dtim_period,
+        frag_threshold=frag_threshold,
+        rts_threshold=rts_threshold,
+        force_wmm=force_wmm,
+        hidden=hidden,
+        bss_settings=[],
+        security=security,
+        pmf_support=pmf_support,
+        n_capabilities=n_capabilities,
+        ac_capabilities=ac_capabilities,
+        vht_bandwidth=vht_bandwidth,
+        wnm_features=wnm_features,
+    )
+    return access_point.start_ap(
+        hostapd_config=ap,
+        radvd_config=RadvdConfig() if is_ipv6_enabled else None,
+        setup_bridge=setup_bridge,
+        is_nat_enabled=is_nat_enabled,
+        additional_parameters=additional_ap_parameters,
+    )
+
+
+def create(configs: Any) -> List[AccessPoint]:
+    """Creates ap controllers from a json config.
+
+    Creates an ap controller from either a list or a single
+    element. The element can either be just the hostname or a dictionary
+    containing the hostname and username of the ap to connect to over ssh.
+
+    Args:
+        configs: The json configs that represent this controller.
+
+    Returns:
+        A list of AccessPoint objects, one per config entry.
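+
+    Example (illustrative; a hostname-only config, as described above):
+
+        aps = create(["ap.example.com"])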
+    """
+    return [AccessPoint(c) for c in configs]
+
+
+def destroy(aps: List[AccessPoint]) -> None:
+    """Destroys a list of access points.
+
+    Args:
+        aps: The list of access points to destroy.
+    """
+    for ap in aps:
+        ap.close()
+
+
+def get_info(aps: List[AccessPoint]) -> List[str]:
+    """Get information on a list of access points.
+
+    Args:
+        aps: A list of AccessPoints.
+
+    Returns:
+        A list of all aps hostname.
+    """
+    return [ap.ssh_settings.hostname for ap in aps]
diff --git a/src/antlion/controllers/adb.py b/src/antlion/controllers/adb.py
index 9acfa1e..5c3848d 100644
--- a/src/antlion/controllers/adb.py
+++ b/src/antlion/controllers/adb.py
@@ -26,19 +26,19 @@
 DEFAULT_ADB_TIMEOUT = 60
 DEFAULT_ADB_PULL_TIMEOUT = 180
 
-ADB_REGEX = re.compile('adb:')
+ADB_REGEX = re.compile("adb:")
 # Uses a regex to be backwards compatible with previous versions of ADB
 # (N and above add the serial to the error msg).
-DEVICE_NOT_FOUND_REGEX = re.compile('error: device (?:\'.*?\' )?not found')
-DEVICE_OFFLINE_REGEX = re.compile('error: device offline')
+DEVICE_NOT_FOUND_REGEX = re.compile("error: device (?:'.*?' )?not found")
+DEVICE_OFFLINE_REGEX = re.compile("error: device offline")
 # Raised when adb forward commands fail to forward a port.
-CANNOT_BIND_LISTENER_REGEX = re.compile('error: cannot bind listener:')
+CANNOT_BIND_LISTENER_REGEX = re.compile("error: cannot bind listener:")
 # Expected output is "Android Debug Bridge version 1.0.XX
-ADB_VERSION_REGEX = re.compile('Android Debug Bridge version 1.0.(\d+)')
-GREP_REGEX = re.compile('grep(\s+)')
+ADB_VERSION_REGEX = re.compile(r"Android Debug Bridge version 1.0.(\d+)")
+GREP_REGEX = re.compile(r"grep(\s+)")
 
-ROOT_USER_ID = '0'
-SHELL_USER_ID = '2000'
+ROOT_USER_ID = "0"
+SHELL_USER_ID = "2000"
 
 
 def parsing_parcel_output(output):
@@ -51,8 +51,8 @@
         0x00000020: 00350034 00330035 00320038 00310033 '4.5.5.3.8.2.3.1.'
         0x00000030: 00000000                            '....            ')
     """
-    output = ''.join(re.findall(r"'(.*)'", output))
-    return re.sub(r'[.\s]', '', output)
+    output = "".join(re.findall(r"'(.*)'", output))
+    return re.sub(r"[.\s]", "", output)
 
 
 class AdbProxy(object):
@@ -75,7 +75,7 @@
         """
         self.serial = serial
         self._server_local_port = None
-        adb_path = shutil.which('adb')
+        adb_path = shutil.which("adb")
         adb_cmd = [shlex.quote(adb_path)]
         if serial:
             adb_cmd.append("-s %s" % serial)
@@ -87,8 +87,10 @@
             temp_dir = ssh_connection.run("mktemp -d").stdout.strip()
             ssh_connection.send_file(adb_path, temp_dir)
             # Start up a new adb server running as root from the copied binary.
-            remote_adb_cmd = "%s/adb %s root" % (temp_dir, "-s %s" % serial
-                                                 if serial else "")
+            remote_adb_cmd = "%s/adb %s root" % (
+                temp_dir,
+                "-s %s" % serial if serial else "",
+            )
             ssh_connection.run(remote_adb_cmd)
             # Proxy a local port to the adb server port
             local_port = ssh_connection.create_ssh_tunnel(5037)
@@ -101,7 +103,7 @@
 
     def get_user_id(self):
         """Returns the adb user. Either 2000 (shell) or 0 (root)."""
-        return self.shell('id -u')
+        return self.shell("id -u")
 
     def is_root(self, user_id=None):
         """Checks if the user is root.
@@ -156,13 +158,19 @@
             AdbCommandError for errors from commands executed through ADB.
         """
         if isinstance(cmd, list):
-            cmd = ' '.join(cmd)
+            cmd = " ".join(cmd)
         result = job.run(cmd, ignore_status=True, timeout=timeout)
         ret, out, err = result.exit_status, result.stdout, result.stderr
 
-        if any(pattern.match(err) for pattern in
-               [ADB_REGEX, DEVICE_OFFLINE_REGEX, DEVICE_NOT_FOUND_REGEX,
-                CANNOT_BIND_LISTENER_REGEX]):
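+        # stderr matching any of these patterns indicates an adb transport
+        # problem (device not found, device offline, listener bind failure),
+        # so surface it as an AdbError regardless of the exit status.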
+        if any(
+            pattern.match(err)
+            for pattern in [
+                ADB_REGEX,
+                DEVICE_OFFLINE_REGEX,
+                DEVICE_NOT_FOUND_REGEX,
+                CANNOT_BIND_LISTENER_REGEX,
+            ]
+        ):
             raise AdbError(cmd=cmd, stdout=out, stderr=err, ret_code=ret)
         if "Result: Parcel" in out:
             return parsing_parcel_output(out)
@@ -173,8 +181,7 @@
         return out
 
     def _exec_adb_cmd(self, name, arg_str, **kwargs):
-        return self._exec_cmd(' '.join((self.adb_str, name, arg_str)),
-                              **kwargs)
+        return self._exec_cmd(" ".join((self.adb_str, name, arg_str)), **kwargs)
 
     def _exec_cmd_nb(self, cmd, **kwargs):
         """Executes adb commands in a new shell, non blocking.
@@ -186,8 +193,7 @@
         return job.run_async(cmd, **kwargs)
 
     def _exec_adb_cmd_nb(self, name, arg_str, **kwargs):
-        return self._exec_cmd_nb(' '.join((self.adb_str, name, arg_str)),
-                                 **kwargs)
+        return self._exec_cmd_nb(" ".join((self.adb_str, name, arg_str)), **kwargs)
 
     def tcp_forward(self, host_port, device_port):
         """Starts tcp forwarding from localhost to this android device.
@@ -206,9 +212,11 @@
             #     device port
             remote_port = self._ssh_connection.find_free_port()
             host_port = self._ssh_connection.create_ssh_tunnel(
-                remote_port, local_port=host_port)
-        output = self.forward("tcp:%d tcp:%d" % (host_port, device_port),
-                              ignore_status=True)
+                remote_port, local_port=host_port
+            )
+        output = self.forward(
+            "tcp:%d tcp:%d" % (host_port, device_port), ignore_status=True
+        )
         # If hinted_port is 0, the output will be the selected port.
         # Otherwise, there will be no output upon successfully
         # forwarding the hinted port.
@@ -229,8 +237,9 @@
         if self._ssh_connection:
             remote_port = self._ssh_connection.close_ssh_tunnel(host_port)
             if remote_port is None:
-                logging.warning("Cannot close unknown forwarded tcp port: %d",
-                                host_port)
+                logging.warning(
+                    "Cannot close unknown forwarded tcp port: %d", host_port
+                )
                 return
             # The actual port we need to disable via adb is on the remote host.
             host_port = remote_port
@@ -254,20 +263,18 @@
     # command.
     def shell(self, command, ignore_status=False, timeout=DEFAULT_ADB_TIMEOUT):
         return self._exec_adb_cmd(
-            'shell',
-            shlex.quote(command),
-            ignore_status=ignore_status,
-            timeout=timeout)
+            "shell", shlex.quote(command), ignore_status=ignore_status, timeout=timeout
+        )
 
     def shell_nb(self, command):
-        return self._exec_adb_cmd_nb('shell', shlex.quote(command))
+        return self._exec_adb_cmd_nb("shell", shlex.quote(command))
 
     def __getattr__(self, name):
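+        # Any unknown attribute is treated as an adb subcommand: underscores
+        # in the name are converted to dashes, so adb.wait_for_device() runs
+        # "adb wait-for-device". pull, push, and remount get a longer default
+        # timeout (DEFAULT_ADB_PULL_TIMEOUT).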
         def adb_call(*args, **kwargs):
-            clean_name = name.replace('_', '-')
-            if clean_name in ['pull', 'push', 'remount'] and 'timeout' not in kwargs:
-                kwargs['timeout'] = DEFAULT_ADB_PULL_TIMEOUT
-            arg_str = ' '.join(str(elem) for elem in args)
+            clean_name = name.replace("_", "-")
+            if clean_name in ["pull", "push", "remount"] and "timeout" not in kwargs:
+                kwargs["timeout"] = DEFAULT_ADB_PULL_TIMEOUT
+            arg_str = " ".join(str(elem) for elem in args)
             return self._exec_adb_cmd(clean_name, arg_str, **kwargs)
 
         return adb_call
@@ -282,7 +289,9 @@
         match = re.search(ADB_VERSION_REGEX, version_output)
 
         if not match:
-            logging.error('Unable to capture ADB version from adb version '
-                          'output: %s' % version_output)
-            raise AdbError('adb version', version_output, '', '')
+            logging.error(
+                "Unable to capture ADB version from adb version "
+                "output: %s" % version_output
+            )
+            raise AdbError("adb version", version_output, "", "")
         return int(match.group(1))
diff --git a/src/antlion/controllers/adb_lib/error.py b/src/antlion/controllers/adb_lib/error.py
index 6fcae7d..9599214 100644
--- a/src/antlion/controllers/adb_lib/error.py
+++ b/src/antlion/controllers/adb_lib/error.py
@@ -28,8 +28,12 @@
         self.ret_code = ret_code
 
     def __str__(self):
-        return ("Error executing adb cmd '%s'. ret: %d, stdout: %s, stderr: %s"
-                ) % (self.cmd, self.ret_code, self.stdout, self.stderr)
+        return ("Error executing adb cmd '%s'. ret: %d, stdout: %s, stderr: %s") % (
+            self.cmd,
+            self.ret_code,
+            self.stdout,
+            self.stderr,
+        )
 
 
 class AdbCommandError(AdbError):
diff --git a/src/antlion/controllers/amarisoft_lib/OWNERS b/src/antlion/controllers/amarisoft_lib/OWNERS
deleted file mode 100644
index edee4ef..0000000
--- a/src/antlion/controllers/amarisoft_lib/OWNERS
+++ /dev/null
@@ -1,4 +0,0 @@
-markusliu@google.com
-mollychang@google.com
-angelayu@google.com
-zoeyliu@google.com
diff --git a/src/antlion/controllers/amarisoft_lib/amarisoft_client.py b/src/antlion/controllers/amarisoft_lib/amarisoft_client.py
deleted file mode 100644
index 5386f81..0000000
--- a/src/antlion/controllers/amarisoft_lib/amarisoft_client.py
+++ /dev/null
@@ -1,221 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import asyncio
-import json
-import logging
-from typing import Any, Mapping, Optional, Tuple
-
-from antlion.controllers.amarisoft_lib import ssh_utils
-import immutabledict
-import websockets
-
-_CONFIG_DIR_MAPPING = immutabledict.immutabledict({
-    'enb': '/config/enb.cfg',
-    'mme': '/config/mme.cfg',
-    'ims': '/config/ims.cfg',
-    'mbms': '/config/mbmsgw.cfg',
-    'ots': '/config/ots.cfg'
-})
-
-
-class MessageFailureError(Exception):
-  """Raises an error when the message execution fail."""
-
-
-class AmariSoftClient(ssh_utils.RemoteClient):
-  """The SSH client class interacts with Amarisoft.
-
-    A simulator used to simulate the base station can output different signals
-    according to the network configuration settings.
-    For example: T Mobile NSA LTE band 66 + NR band 71.
-  """
-
-  async def _send_message_to_callbox(self, uri: str,
-                                     msg: str) -> Tuple[str, str]:
-    """Implements async function for send message to the callbox.
-
-    Args:
-      uri: The uri of specific websocket interface.
-      msg: The message to be send to callbox.
-
-    Returns:
-      The response from callbox.
-    """
-    async with websockets.connect(
-        uri, extra_headers={'origin': 'Test'}) as websocket:
-      await websocket.send(msg)
-      head = await websocket.recv()
-      body = await websocket.recv()
-    return head, body
-
-  def send_message(self, port: str, msg: str) -> Tuple[str, str]:
-    """Sends a message to the callbox.
-
-    Args:
-      port: The port of specific websocket interface.
-      msg: The message to be send to callbox.
-
-    Returns:
-      The response from callbox.
-    """
-    return asyncio.get_event_loop().run_until_complete(
-        self._send_message_to_callbox(f'ws://{self.host}:{port}/', msg))
-
-  def verify_response(self, func: str, head: str,
-                      body: str) -> Tuple[Mapping[str, Any], Mapping[str, Any]]:
-    """Makes sure there are no error messages in Amarisoft's response.
-
-    If a message produces an error, response will have an error string field
-    representing the error.
-    For example:
-      {
-        "message": "ready",
-        "message_id": <message id>,
-        "error": <error message>,
-        "type": "ENB",
-        "name: <name>,
-      }
-
-    Args:
-      func: The message send to Amarisoft.
-      head: Responsed message head.
-      body: Responsed message body.
-
-    Returns:
-      Standard output of the shell command.
-
-    Raises:
-       MessageFailureError: Raised when an error occurs in the response message.
-    """
-    loaded_head = json.loads(head)
-    loaded_body = json.loads(body)
-
-    if loaded_head.get('message') != 'ready':
-      raise MessageFailureError(
-          f'Fail to get response from callbox, message: {loaded_head["error"]}')
-    if 'error' in loaded_body:
-      raise MessageFailureError(
-          f'Fail to excute {func} with error message: {loaded_body["error"]}')
-    if loaded_body.get('message') != func:
-      raise MessageFailureError(
-          f'The message sent was {loaded_body["message"]} instead of {func}.')
-    return loaded_head, loaded_body
-
-  def lte_service_stop(self) -> None:
-    """Stops to output signal."""
-    self.run_cmd('systemctl stop lte')
-
-  def lte_service_start(self):
-    """Starts to output signal."""
-    self.run_cmd('systemctl start lte')
-
-  def lte_service_restart(self):
-    """Restarts to output signal."""
-    self.run_cmd('systemctl restart lte')
-
-  def lte_service_enable(self):
-    """lte service remains enable until next reboot."""
-    self.run_cmd('systemctl enable lte')
-
-  def lte_service_disable(self):
-    """lte service remains disable until next reboot."""
-    self.run_cmd('systemctl disable lte')
-
-  def lte_service_is_active(self) -> bool:
-    """Checks lte service is active or not.
-
-    Returns:
-      True if service active, False otherwise.
-    """
-    return not any('inactive' in line
-                   for line in self.run_cmd('systemctl is-active lte'))
-
-  def set_config_dir(self, cfg_type: str, path: str) -> None:
-    """Sets the path of target configuration file.
-
-    Args:
-      cfg_type: The type of target configuration. (e.g. mme, enb ...etc.)
-      path: The path of target configuration. (e.g.
-        /root/lteenb-linux-2020-12-14)
-    """
-    path_old = self.get_config_dir(cfg_type)
-    if path != path_old:
-      logging.info('set new path %s (was %s)', path, path_old)
-      self.run_cmd(f'ln -sfn {path} /root/{cfg_type}')
-    else:
-      logging.info('path %s does not change.', path_old)
-
-  def get_config_dir(self, cfg_type: str) -> Optional[str]:
-    """Gets the path of target configuration.
-
-    Args:
-      cfg_type: Target configuration type. (e.g. mme, enb...etc.)
-
-    Returns:
-      The path of configuration.
-    """
-    result = self.run_cmd(f'readlink /root/{cfg_type}')
-    if result:
-      path = result[0].strip()
-    else:
-      logging.warning('%s path not found.', cfg_type)
-      return None
-    return path
-
-  def set_config_file(self, cfg_type: str, cfg_file: str) -> None:
-    """Sets the configuration to be executed.
-
-    Args:
-      cfg_type: The type of target configuration. (e.g. mme, enb...etc.)
-      cfg_file: The configuration to be executed. (e.g.
-        /root/lteenb-linux-2020-12-14/config/gnb.cfg )
-
-    Raises:
-      FileNotFoundError: Raised when a file or directory is requested but
-      doesn’t exist.
-    """
-    cfg_link = self.get_config_dir(cfg_type) + _CONFIG_DIR_MAPPING[cfg_type]
-    if not self.is_file_exist(cfg_file):
-      raise FileNotFoundError("The command file doesn't exist")
-    self.run_cmd(f'ln -sfn {cfg_file} {cfg_link}')
-
-  def get_config_file(self, cfg_type: str) -> Optional[str]:
-    """Gets the current configuration of specific configuration type.
-
-    Args:
-      cfg_type: The type of target configuration. (e.g. mme, enb...etc.)
-
-    Returns:
-      The current configuration with absolute path.
-    """
-    cfg_path = self.get_config_dir(cfg_type) + _CONFIG_DIR_MAPPING[cfg_type]
-    if cfg_path:
-      result = self.run_cmd(f'readlink {cfg_path}')
-      if result:
-        return result[0].strip()
-
-  def get_all_config_dir(self) -> Mapping[str, str]:
-    """Gets all configuration directions.
-
-    Returns:
-      All configuration directions.
-    """
-    config_dir = {}
-    for cfg_type in ('ots', 'enb', 'mme', 'mbms'):
-      config_dir[cfg_type] = self.get_config_dir(cfg_type)
-      logging.debug('get path of %s: %s', cfg_type, config_dir[cfg_type])
-    return config_dir
diff --git a/src/antlion/controllers/amarisoft_lib/amarisoft_constants.py b/src/antlion/controllers/amarisoft_lib/amarisoft_constants.py
deleted file mode 100644
index c62bf2a..0000000
--- a/src/antlion/controllers/amarisoft_lib/amarisoft_constants.py
+++ /dev/null
@@ -1,14 +0,0 @@
-"""Constants for test."""

-

-

-# ports of lte service websocket interface

-class PortNumber:

-  URI_MME = '9000'

-  URI_ENB = '9001'

-  URI_UE = '9002'

-  URI_IMS = '9003'

-  URI_MBMS = '9004'

-  URI_PROBE = '9005'

-  URI_LICENSE = '9006'

-  URI_MON = '9007'

-  URI_VIEW = '9008'

diff --git a/src/antlion/controllers/amarisoft_lib/config_utils.py b/src/antlion/controllers/amarisoft_lib/config_utils.py
deleted file mode 100644
index 8d3b603..0000000
--- a/src/antlion/controllers/amarisoft_lib/config_utils.py
+++ /dev/null
@@ -1,201 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import enum
-import os
-import immutabledict
-
-from antlion.controllers.amarisoft_lib import amarisoft_client
-
-TEMPLATE_PATH = os.path.dirname(os.path.abspath(__file__)) + '/config_templates'
-TEMPLATE_PATH_ENB = f'{TEMPLATE_PATH}/enb/'
-TEMPLATE_PATH_MME = f'{TEMPLATE_PATH}/mme/'
-
-_CLIENT_CONFIG_DIR_MAPPING = immutabledict.immutabledict({
-    'enb': '/config/mhtest_enb.cfg',
-    'mme': '/config/mhtest_mme.cfg',
-})
-
-
-class EnbCfg():
-  """MME configuration templates."""
-  ENB_GENERIC = 'enb-single-generic.cfg'
-  GNB_NSA_GENERIC = 'gnb-nsa-lte-ho-generic.cfg'
-  GNB_SA_GENERIC = 'gnb-sa-lte-ho-generic.cfg'
-
-
-class MmeCfg():
-  """MME configuration templates."""
-  MME_GENERIC = 'mme-generic.cfg'
-
-
-class SpecTech(enum.Enum):
-  """Spectrum usage techniques."""
-  FDD = 0
-  TDD = 1
-
-
-class ConfigUtils():
-  """Utilities for set Amarisoft configs.
-
-  Attributes:
-    remote: An amarisoft client.
-  """
-
-  def __init__(self, remote: amarisoft_client.AmariSoftClient):
-    self.remote = remote
-
-  def upload_enb_template(self, cfg: str) -> bool:
-    """Loads ENB configuration.
-
-    Args:
-      cfg: The ENB configuration to be loaded.
-
-    Returns:
-      True if the ENB configuration was loaded successfully, False otherwise.
-    """
-    cfg_template = TEMPLATE_PATH_ENB + cfg
-    if not os.path.isfile(cfg_template):
-      return False
-    cfg_path = self.remote.get_config_dir(
-        'enb') + _CLIENT_CONFIG_DIR_MAPPING['enb']
-    self.remote.run_cmd('rm -f ' + cfg_path)
-    self.remote.sftp_upload(cfg_template, cfg_path)
-    self.remote.set_config_file('enb', cfg_path)
-    if not self.remote.is_file_exist(cfg_path):
-      return False
-    return True
-
-  def upload_mme_template(self, cfg: str) -> bool:
-    """Loads MME configuration.
-
-    Args:
-      cfg: The MME configuration to be loaded.
-
-    Returns:
-      True if the ENB configuration was loaded successfully, False otherwise.
-    """
-    cfg_template = TEMPLATE_PATH_MME + cfg
-    if not os.path.isfile(cfg_template):
-      return False
-    cfg_path = self.remote.get_config_dir(
-        'mme') + _CLIENT_CONFIG_DIR_MAPPING['mme']
-    self.remote.run_cmd('rm -f ' + cfg_path)
-    self.remote.sftp_upload(cfg_template, cfg_path)
-    self.remote.set_config_file('mme', cfg_path)
-    if not self.remote.is_file_exist(cfg_path):
-      return False
-    return True
-
-  def enb_set_plmn(self, plmn: str) -> bool:
-    """Sets the PLMN in ENB configuration.
-
-    Args:
-      plmn: The PLMN to be set. ex: 311480
-
-    Returns:
-      True if set PLMN successfully, False otherwise.
-    """
-    cfg_path = self.remote.get_config_dir(
-        'enb') + _CLIENT_CONFIG_DIR_MAPPING['enb']
-    if not self.remote.is_file_exist(cfg_path):
-      return False
-    string_from = '#define PLMN \"00101\"'
-    string_to = f'#define PLMN \"{plmn}\"'
-    self.remote.run_cmd(f'sed -i \'s/\\r//g\' {cfg_path}')
-    self.remote.run_cmd(
-        f'sed -i \':a;N;$!ba;s/{string_from}/{string_to}/g\' {cfg_path}')
-    return True
-
-  def mme_set_plmn(self, plmn: str) -> bool:
-    """Sets the PLMN in MME configuration.
-
-    Args:
-      plmn: The PLMN to be set. ex:'311480'
-
-    Returns:
-      True if set PLMN successfully, False otherwise.
-    """
-    cfg_path = self.remote.get_config_dir(
-        'mme') + _CLIENT_CONFIG_DIR_MAPPING['mme']
-    if not self.remote.is_file_exist(cfg_path):
-      return False
-    string_from = '#define PLMN \"00101\"'
-    string_to = f'#define PLMN \"{plmn}\"'
-    self.remote.run_cmd(f'sed -i \'s/\\r//g\' {cfg_path}')
-    self.remote.run_cmd(
-        f'sed -i \':a;N;$!ba;s/{string_from}/{string_to}/g\' {cfg_path}')
-    return True
-
-  def enb_set_fdd_arfcn(self, arfcn: int) -> bool:
-    """Sets the FDD ARFCN in ENB configuration.
-
-    Args:
-      arfcn: The arfcn to be set. ex: 1400
-
-    Returns:
-      True if set FDD ARFCN successfully, False otherwise.
-    """
-    cfg_path = self.remote.get_config_dir(
-        'enb') + _CLIENT_CONFIG_DIR_MAPPING['enb']
-    if not self.remote.is_file_exist(cfg_path):
-      return False
-    string_from = '#define FDD_CELL_earfcn 1400'
-    string_to = f'#define FDD_CELL_earfcn {arfcn}'
-    self.remote.run_cmd(f'sed -i \'s/\\r//g\' {cfg_path}')
-    self.remote.run_cmd(
-        f'sed -i \':a;N;$!ba;s/{string_from}/{string_to}/g\' {cfg_path}')
-    return True
-
-  def enb_set_tdd_arfcn(self, arfcn: int) -> bool:
-    """Sets the TDD ARFCN in ENB configuration.
-
-    Args:
-      arfcn: The arfcn to be set. ex: 1400
-
-    Returns:
-      True if set FDD ARFCN successfully, False otherwise.
-    """
-    cfg_path = self.remote.get_config_dir(
-        'enb') + _CLIENT_CONFIG_DIR_MAPPING['enb']
-    if not self.remote.is_file_exist(cfg_path):
-      return False
-    string_from = '#define TDD_CELL_earfcn 40620'
-    string_to = f'#define TDD_CELL_earfcn {arfcn}'
-    self.remote.run_cmd(f'sed -i \'s/\\r//g\' {cfg_path}')
-    self.remote.run_cmd(
-        f'sed -i \':a;N;$!ba;s/{string_from}/{string_to}/g\' {cfg_path}')
-    return True
-
-  def enb_set_spectrum_tech(self, tech: int) -> bool:
-    """Sets the spectrum usage techniques in ENB configuration.
-
-    Args:
-      tech: the spectrum usage techniques. ex: SpecTech.FDD.name
-
-    Returns:
-      True if set spectrum usage techniques successfully, False otherwise.
-    """
-    cfg_path = self.remote.get_config_dir(
-        'enb') + _CLIENT_CONFIG_DIR_MAPPING['enb']
-    if not self.remote.is_file_exist(cfg_path):
-      return False
-    string_from = '#define TDD 0'
-    string_to = f'#define TDD {tech}'
-    self.remote.run_cmd(f'sed -i \'s/\\r//g\' {cfg_path}')
-    self.remote.run_cmd(
-        f'sed -i \':a;N;$!ba;s/{string_from}/{string_to}/g\' {cfg_path}')
-    return True
diff --git a/src/antlion/controllers/amarisoft_lib/ims.py b/src/antlion/controllers/amarisoft_lib/ims.py
deleted file mode 100644
index ee575c4..0000000
--- a/src/antlion/controllers/amarisoft_lib/ims.py
+++ /dev/null
@@ -1,156 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import json
-import logging
-from typing import Any, Mapping, Optional, Union
-
-from antlion.controllers.amarisoft_lib import amarisoft_client
-from antlion.controllers.amarisoft_lib import amarisoft_constants as const
-
-
-class ImsFunctions():
-  """Utilities for Amarisoft's IMS Remote API.
-
-  Attributes:
-    remote: An amarisoft client.
-  """
-
-  def __init__(self, remote: amarisoft_client.AmariSoftClient):
-    self.remote = remote
-
-  def make_call(self,
-              impi: str,
-              impu: str,
-              contact: str,
-              sip_file: str = 'mt_call_qos.sdp',
-              caller: str = 'Amarisoft',
-              duration: int = 30) -> None:
-    """Performs MT call from callbox to test device.
-
-    Args:
-      impi: IMPI (IP Multimedia Private identity) of user to call.
-      impu: IMPU (IP Multimedia Public identity) of user to call.
-      contact: Contact SIP uri of user to call.
-      sip_file: Define file to use as sdp.
-      caller: The number/ID is displayed as the caller.
-      duration: If set, call duration in seconds (The server will close the
-        dialog).
-    """
-    msg = {}
-    msg['message'] = 'mt_call'
-    msg['impi'] = impi
-    msg['impu'] = impu
-    msg['contact'] = contact
-    msg['sip_file'] = sip_file
-    msg['caller'] = caller
-    msg['duration'] = duration
-    dump_msg = json.dumps(msg)
-    logging.debug('mt_call dump msg = %s', dump_msg)
-    head, body = self.remote.send_message(const.PortNumber.URI_IMS, dump_msg)
-    self.remote.verify_response('mt_call', head, body)
-
-  def send_sms(self,
-               text: str,
-               impi: str,
-               sender: Optional[str] = 'Amarisoft') -> None:
-    """Sends SMS to assigned device which connect to Amarisoft.
-
-    Args:
-      text: SMS text to send.
-      impi: IMPI (IP Multimedia Private identity) of user.
-      sender: Sets SMS sender.
-    """
-    msg = {}
-    msg['message'] = 'sms'
-    msg['text'] = text
-    msg['impi'] = impi
-    msg['sender'] = sender
-    dump_msg = json.dumps(msg)
-    logging.debug('send_sms dump msg = %s', dump_msg)
-    head, body = self.remote.send_message(const.PortNumber.URI_IMS, dump_msg)
-    self.remote.verify_response('sms', head, body)
-
-  def send_mms(self, filename: str, sender: str, receiver: str) -> None:
-    """Sends MMS to assigned device which connect to Amarisoft.
-
-    Args:
-      filename: File name with absolute path to send. Extensions jpg, jpeg, png,
-        gif and txt are supported.
-      sender: IMPI (IP Multimedia Private identity) of user.
-      receiver: IMPU (IP Multimedia Public identity) of user.
-    """
-    msg = {}
-    msg['message'] = 'mms'
-    msg['filename'] = filename
-    msg['sender'] = sender
-    msg['receiver'] = receiver
-    dump_msg = json.dumps(msg)
-    logging.debug('send_mms dump msg = %s', dump_msg)
-    head, body = self.remote.send_message(const.PortNumber.URI_IMS, dump_msg)
-    self.remote.verify_response('mms', head, body)
-
-  def users_get(self, registered_only: bool = True) -> Mapping[str, Any]:
-    """Gets users state.
-
-    Args:
-      registered_only: If set, only registered user will be dumped.
-
-    Returns:
-      The user information.
-    """
-    msg = {}
-    msg['message'] = 'users_get'
-    msg['registered_only'] = registered_only
-    dump_msg = json.dumps(msg)
-    logging.debug('users_get dump msg = %s', dump_msg)
-    head, body = self.remote.send_message(const.PortNumber.URI_IMS, dump_msg)
-    _, loaded_body = self.remote.verify_response('users_get', head, body)
-    return loaded_body
-
-  def get_impu(self, impi) -> Union[str, None]:
-    """Obtains the IMPU of the target user according to IMPI.
-
-    Args:
-      impi: IMPI (IP Multimedia Private identity) of user to call. ex:
-        "310260123456785@ims.mnc260.mcc310.3gppnetwork.org"
-
-    Returns:
-      The IMPU of target user.
-    """
-    body = self.users_get(True)
-    for index in range(len(body['users'])):
-      if impi in body['users'][index]['impi']:
-        impu = body['users'][index]['bindings'][0]['impu'][1]
-        return impu
-    return None
-
-  def get_uri(self, impi) -> Union[str, None]:
-    """Obtains the URI of the target user according to IMPI.
-
-    Args:
-      impi: IMPI (IP Multimedia Private identity) of user to call. ex:
-        "310260123456785@ims.mnc260.mcc310.3gppnetwork.org"
-
-    Returns:
-      The URI of target user.
-    """
-    body = self.users_get(True)
-    for index in range(len(body['users'])):
-      if impi in body['users'][index]['impi']:
-        uri = body['users'][index]['bindings'][0]['uri']
-        return uri
-    return None
diff --git a/src/antlion/controllers/amarisoft_lib/mme.py b/src/antlion/controllers/amarisoft_lib/mme.py
deleted file mode 100644
index 6f7ee42..0000000
--- a/src/antlion/controllers/amarisoft_lib/mme.py
+++ /dev/null
@@ -1,80 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import json
-import logging
-
-from antlion.controllers.amarisoft_lib import amarisoft_constants as const
-from antlion.controllers.amarisoft_lib import amarisoft_client
-
-
-class MmeFunctions():
-  """Utilities for Amarisoft's MME Remote API.
-
-  Attributes:
-    remote: An amarisoft client.
-  """
-
-  def __init__(self, remote: amarisoft_client.AmariSoftClient):
-    self.remote = remote
-
-  def pws_write(self, local_id: str, n50: bool = False):
-    """Broadcasts emergency alert message.
-
-    Args:
-      local_id: ID of the message as defined by local identifier in MME
-        configuration file.
-      n50: If True, N50 interface is used, otherwise SBC interface is used. (see TS 23.041)
-    """
-    msg = {}
-    msg['message'] = 'pws_write'
-    msg['local_id'] = local_id
-    msg['nf'] = n50
-    dump_msg = json.dumps(msg)
-    logging.debug('pws_write dump msg = %s', dump_msg)
-    head, body = self.remote.send_message(const.PortNumber.URI_MME, dump_msg)
-    self.remote.verify_response('pws_write', head, body)
-
-  def pws_kill(self, local_id: str, n50: bool = False):
-    """Stops broadcasts emergency alert message.
-
-    Args:
-      local_id: ID of the message as defined by local identifier in MME
-        configuration file.
-      n50: If True, N50 interface is used, otherwise SBC interface is used. (see TS 23.041)
-    """
-    msg = {}
-    msg['message'] = 'pws_kill'
-    msg['local_id'] = local_id
-    msg['nf'] = n50
-    dump_msg = json.dumps(msg)
-    logging.debug('pws_kill dump msg = %s', dump_msg)
-    head, body = self.remote.send_message(const.PortNumber.URI_MME, dump_msg)
-    self.remote.verify_response('pws_kill', head, body)
-
-  def ue_del(self, imsi: str):
-    """Remove UE from the UE database and force disconnect if necessary.
-
-    Args:
-      imsi: IMSI of the UE to delete.
-    """
-    msg = {}
-    msg['message'] = 'ue_del'
-    msg['imsi'] = imsi
-    dump_msg = json.dumps(msg)
-    logging.debug('ue_del dump msg = %s', dump_msg)
-    head, body = self.remote.send_message(const.PortNumber.URI_MME, dump_msg)
-    self.remote.verify_response('ue_del', head, body)
diff --git a/src/antlion/controllers/amarisoft_lib/ssh_utils.py b/src/antlion/controllers/amarisoft_lib/ssh_utils.py
deleted file mode 100644
index fccc1d7..0000000
--- a/src/antlion/controllers/amarisoft_lib/ssh_utils.py
+++ /dev/null
@@ -1,195 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-from typing import Sequence
-
-import paramiko
-
-COMMAND_RETRY_TIMES = 3
-
-
-class RunCommandError(Exception):
-  """Raises an error when run command fail."""
-
-
-class NotConnectedError(Exception):
-  """Raises an error when run command without SSH connect."""
-
-
-class RemoteClient:
-  """The SSH client class interacts with the test machine.
-
-  Attributes:
-    host: A string representing the IP address of amarisoft.
-    port: A string representing the default port of SSH.
-    username: A string representing the username of amarisoft.
-    password: A string representing the password of amarisoft.
-    ssh: A SSH client.
-    sftp: A SFTP client.
-  """
-
-  def __init__(self,
-               host: str,
-               username: str,
-               password: str,
-               port: str = '22') -> None:
-    self.host = host
-    self.port = port
-    self.username = username
-    self.password = password
-    self.ssh = paramiko.SSHClient()
-    self.sftp = None
-
-  def ssh_is_connected(self) -> bool:
-    """Checks SSH connect or not.
-
-    Returns:
-      True if SSH is connected, False otherwise.
-    """
-    return self.ssh and self.ssh.get_transport().is_active()
-
-  def ssh_close(self) -> bool:
-    """Closes the SSH connection.
-
-    Returns:
-      True if ssh session closed, False otherwise.
-    """
-    for _ in range(COMMAND_RETRY_TIMES):
-      if self.ssh_is_connected():
-        self.ssh.close()
-      else:
-        return True
-    return False
-
-  def connect(self) -> bool:
-    """Creats SSH connection.
-
-    Returns:
-      True if success, False otherwise.
-    """
-    for _ in range(COMMAND_RETRY_TIMES):
-      try:
-        self.ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
-        self.ssh.connect(self.host, self.port, self.username, self.password)
-        self.ssh.get_transport().set_keepalive(1)
-        self.sftp = paramiko.SFTPClient.from_transport(self.ssh.get_transport())
-        return True
-      except Exception:  # pylint: disable=broad-except
-        self.ssh_close()
-    return False
-
-  def run_cmd(self, cmd: str) -> Sequence[str]:
-    """Runs shell command.
-
-    Args:
-      cmd: Command to be executed.
-
-    Returns:
-      Standard output of the shell command.
-
-    Raises:
-       RunCommandError: Raise error when command failed.
-       NotConnectedError: Raised when run command without SSH connect.
-    """
-    if not self.ssh_is_connected():
-      raise NotConnectedError('ssh remote has not been established')
-
-    logging.debug('ssh remote -> %s', cmd)
-    _, stdout, stderr = self.ssh.exec_command(cmd)
-    err = stderr.readlines()
-    if err:
-      logging.error('command failed.')
-      raise RunCommandError(err)
-    return stdout.readlines()
-
-  def is_file_exist(self, file: str) -> bool:
-    """Checks target file exist.
-
-    Args:
-        file: Target file with absolute path.
-
-    Returns:
-        True if file exist, false otherwise.
-    """
-    return any('exist' in line for line in self.run_cmd(
-        f'if [ -f "{file}" ]; then echo -e "exist"; fi'))
-
-  def sftp_upload(self, src: str, dst: str) -> bool:
-    """Uploads a local file to remote side.
-
-    Args:
-      src: The target file with absolute path.
-      dst: The absolute path to put the file with file name.
-      For example:
-        upload('/usr/local/google/home/zoeyliu/Desktop/sample_config.yml',
-        '/root/sample_config.yml')
-
-    Returns:
-      True if file upload success, False otherwise.
-
-    Raises:
-       NotConnectedError: Raised when run command without SSH connect.
-    """
-    if not self.ssh_is_connected():
-      raise NotConnectedError('ssh remote has not been established')
-    if not self.sftp:
-      raise NotConnectedError('sftp remote has not been established')
-
-    logging.info('[local] %s -> [remote] %s', src, dst)
-    self.sftp.put(src, dst)
-    return self.is_file_exist(dst)
-
-  def sftp_download(self, src: str, dst: str) -> bool:
-    """Downloads a file to local.
-
-    Args:
-      src: The target file with absolute path.
-      dst: The absolute path to put the file.
-
-    Returns:
-      True if file download success, False otherwise.
-
-    Raises:
-       NotConnectedError: Raised when run command without SSH connect.
-    """
-    if not self.ssh_is_connected():
-      raise NotConnectedError('ssh remote has not been established')
-    if not self.sftp:
-      raise NotConnectedError('sftp remote has not been established')
-
-    logging.info('[remote] %s -> [local] %s', src, dst)
-    self.sftp.get(src, dst)
-    return self.is_file_exist(dst)
-
-  def sftp_list_dir(self, path: str) -> Sequence[str]:
-    """Lists the names of the entries in the given path.
-
-    Args:
-      path: The path of the list.
-
-    Returns:
-      The names of the entries in the given path.
-
-    Raises:
-       NotConnectedError: Raised when run command without SSH connect.
-    """
-    if not self.ssh_is_connected():
-      raise NotConnectedError('ssh remote has not been established')
-    if not self.sftp:
-      raise NotConnectedError('sftp remote has not been established')
-    return sorted(self.sftp.listdir(path))
-
diff --git a/src/antlion/controllers/android_device.py b/src/antlion/controllers/android_device.py
index 1668d82..0eb0969 100755
--- a/src/antlion/controllers/android_device.py
+++ b/src/antlion/controllers/android_device.py
@@ -53,11 +53,20 @@
 ANDROID_DEVICE_ADB_LOGCAT_PARAM_KEY = "adb_logcat_param"
 ANDROID_DEVICE_EMPTY_CONFIG_MSG = "Configuration is empty, abort!"
 ANDROID_DEVICE_NOT_LIST_CONFIG_MSG = "Configuration should be a list, abort!"
-CRASH_REPORT_PATHS = ("/data/tombstones/", "/data/vendor/ramdump/",
-                      "/data/ramdump/", "/data/vendor/ssrdump",
-                      "/data/vendor/ramdump/bluetooth", "/data/vendor/log/cbd")
-CRASH_REPORT_SKIPS = ("RAMDUMP_RESERVED", "RAMDUMP_STATUS", "RAMDUMP_OUTPUT",
-                      "bluetooth")
+CRASH_REPORT_PATHS = (
+    "/data/tombstones/",
+    "/data/vendor/ramdump/",
+    "/data/ramdump/",
+    "/data/vendor/ssrdump",
+    "/data/vendor/ramdump/bluetooth",
+    "/data/vendor/log/cbd",
+)
+CRASH_REPORT_SKIPS = (
+    "RAMDUMP_RESERVED",
+    "RAMDUMP_STATUS",
+    "RAMDUMP_OUTPUT",
+    "bluetooth",
+)
 ALWAYS_ON_LOG_PATH = "/data/vendor/radio/logs/always-on"
 DEFAULT_QXDM_LOG_PATH = "/data/vendor/radio/diag_logs"
 DEFAULT_SDM_LOG_PATH = "/data/vendor/slog/"
@@ -72,7 +81,7 @@
 WAIT_FOR_DEVICE_TIMEOUT = 180
 ENCRYPTION_WINDOW = "CryptKeeper"
 DEFAULT_DEVICE_PASSWORD = "1111"
-RELEASE_ID_REGEXES = [re.compile(r'\w+\.\d+\.\d+'), re.compile(r'N\w+')]
+RELEASE_ID_REGEXES = [re.compile(r"\w+\.\d+\.\d+"), re.compile(r"N\w+")]
 
 
 def create(configs):
@@ -90,8 +99,7 @@
     elif configs == ANDROID_DEVICE_PICK_ALL_TOKEN:
         ads = get_all_instances()
     elif not isinstance(configs, list):
-        raise errors.AndroidDeviceConfigError(
-            ANDROID_DEVICE_NOT_LIST_CONFIG_MSG)
+        raise errors.AndroidDeviceConfigError(ANDROID_DEVICE_NOT_LIST_CONFIG_MSG)
     elif isinstance(configs[0], str):
         # Configs is a list of serials.
         ads = get_instances(configs)
@@ -104,9 +112,10 @@
     for ad in ads:
         if not ad.is_connected():
             raise errors.AndroidDeviceError(
-                ("Android device %s is specified in config"
-                 " but is not attached.") % ad.serial,
-                serial=ad.serial)
+                ("Android device %s is specified in config" " but is not attached.")
+                % ad.serial,
+                serial=ad.serial,
+            )
     _start_services_on_ads(ads)
     for ad in ads:
         if ad.droid:
@@ -160,7 +169,7 @@
         try:
             ad.start_services()
         except:
-            ad.log.exception('Failed to start some services, abort!')
+            ad.log.exception("Failed to start some services, abort!")
             destroy(running_ads)
             raise
 
@@ -231,46 +240,50 @@
     results = []
     for c in configs:
         try:
-            serial = c.pop('serial')
+            serial = c.pop("serial")
         except KeyError:
             raise errors.AndroidDeviceConfigError(
-                "Required value 'serial' is missing in AndroidDevice config %s."
-                % c)
+                "Required value 'serial' is missing in AndroidDevice config %s." % c
+            )
         client_port = 0
         if ANDROID_DEVICE_SL4A_CLIENT_PORT_KEY in c:
             try:
                 client_port = int(c.pop(ANDROID_DEVICE_SL4A_CLIENT_PORT_KEY))
             except ValueError:
                 raise errors.AndroidDeviceConfigError(
-                    "'%s' is not a valid number for config %s" %
-                    (ANDROID_DEVICE_SL4A_CLIENT_PORT_KEY, c))
+                    "'%s' is not a valid number for config %s"
+                    % (ANDROID_DEVICE_SL4A_CLIENT_PORT_KEY, c)
+                )
         server_port = None
         if ANDROID_DEVICE_SL4A_SERVER_PORT_KEY in c:
             try:
                 server_port = int(c.pop(ANDROID_DEVICE_SL4A_SERVER_PORT_KEY))
             except ValueError:
                 raise errors.AndroidDeviceConfigError(
-                    "'%s' is not a valid number for config %s" %
-                    (ANDROID_DEVICE_SL4A_SERVER_PORT_KEY, c))
+                    "'%s' is not a valid number for config %s"
+                    % (ANDROID_DEVICE_SL4A_SERVER_PORT_KEY, c)
+                )
         forwarded_port = 0
         if ANDROID_DEVICE_SL4A_FORWARDED_PORT_KEY in c:
             try:
-                forwarded_port = int(
-                    c.pop(ANDROID_DEVICE_SL4A_FORWARDED_PORT_KEY))
+                forwarded_port = int(c.pop(ANDROID_DEVICE_SL4A_FORWARDED_PORT_KEY))
             except ValueError:
                 raise errors.AndroidDeviceConfigError(
-                    "'%s' is not a valid number for config %s" %
-                    (ANDROID_DEVICE_SL4A_FORWARDED_PORT_KEY, c))
-        ssh_config = c.pop('ssh_config', None)
+                    "'%s' is not a valid number for config %s"
+                    % (ANDROID_DEVICE_SL4A_FORWARDED_PORT_KEY, c)
+                )
+        ssh_config = c.pop("ssh_config", None)
         ssh_connection = None
         if ssh_config is not None:
             ssh_settings = settings.from_config(ssh_config)
             ssh_connection = connection.SshConnection(ssh_settings)
-        ad = AndroidDevice(serial,
-                           ssh_connection=ssh_connection,
-                           client_port=client_port,
-                           forwarded_port=forwarded_port,
-                           server_port=server_port)
+        ad = AndroidDevice(
+            serial,
+            ssh_connection=ssh_connection,
+            client_port=client_port,
+            forwarded_port=forwarded_port,
+            server_port=server_port,
+        )
         ad.load_config(c)
         results.append(ad)
     return results
@@ -342,8 +355,8 @@
     filtered = filter_devices(ads, _get_device_filter)
     if not filtered:
         raise ValueError(
-            "Could not find a target device that matches condition: %s." %
-            kwargs)
+            "Could not find a target device that matches condition: %s." % kwargs
+        )
     elif len(filtered) == 1:
         return filtered[0]
     else:
@@ -397,31 +410,32 @@
 
     """
 
-    def __init__(self,
-                 serial='',
-                 ssh_connection=None,
-                 client_port=0,
-                 forwarded_port=0,
-                 server_port=None):
+    def __init__(
+        self,
+        serial="",
+        ssh_connection=None,
+        client_port=0,
+        forwarded_port=0,
+        server_port=None,
+    ):
         self.serial = serial
         # logging.log_path only exists when this is used in an ACTS test run.
-        log_path_base = getattr(logging, 'log_path', '/tmp/logs')
-        self.log_dir = 'AndroidDevice%s' % serial
+        log_path_base = getattr(logging, "log_path", "/tmp/logs")
+        self.log_dir = "AndroidDevice%s" % serial
         self.log_path = os.path.join(log_path_base, self.log_dir)
         self.client_port = client_port
         self.forwarded_port = forwarded_port
         self.server_port = server_port
         self.log = tracelogger.TraceLogger(
-            AndroidDeviceLoggerAdapter(logging.getLogger(),
-                                       {'serial': serial}))
+            AndroidDeviceLoggerAdapter(logging.getLogger(), {"serial": serial})
+        )
         self._event_dispatchers = {}
         self._services = []
         self.register_service(services.AdbLogcatService(self))
         self.register_service(services.Sl4aService(self))
         self.adb_logcat_process = None
         self.adb = adb.AdbProxy(serial, ssh_connection=ssh_connection)
-        self.fastboot = fastboot.FastbootProxy(serial,
-                                               ssh_connection=ssh_connection)
+        self.fastboot = fastboot.FastbootProxy(serial, ssh_connection=ssh_connection)
         if not self.is_bootloader:
             self.root_adb()
         self._ssh_connection = ssh_connection
@@ -474,7 +488,7 @@
         self.start_services()
 
     def register_service(self, service):
-        """Registers the service on the device. """
+        """Registers the service on the device."""
         service.register()
         self._services.append(service)
 
@@ -499,8 +513,9 @@
 
         Stop adb logcat and terminate sl4a sessions if exist.
         """
-        event_bus.post(android_events.AndroidStopServicesEvent(self),
-                       ignore_errors=True)
+        event_bus.post(
+            android_events.AndroidStopServicesEvent(self), ignore_errors=True
+        )
 
     def is_connected(self):
         out = self.adb.devices()
@@ -519,8 +534,7 @@
             device is in bootloader mode.
         """
         if self.is_bootloader:
-            self.log.error("Device is in fastboot mode, could not get build "
-                           "info.")
+            self.log.error("Device is in fastboot mode, could not get build " "info.")
             return
 
         build_id = self.adb.getprop("ro.build.id")
@@ -536,7 +550,7 @@
         info = {
             "build_id": build_id,
             "incremental_build_id": incremental_build_id,
-            "build_type": self.adb.getprop("ro.build.type")
+            "build_type": self.adb.getprop("ro.build.type"),
         }
         return info
 
@@ -548,11 +562,11 @@
         can be added via `add_device_info`.
         """
         info = {
-            'serial': self.serial,
-            'model': self.model,
-            'build_info': self.build_info,
-            'user_added_info': self._user_added_device_info,
-            'flavor': self.flavor
+            "serial": self.serial,
+            "model": self.model,
+            "build_info": self.build_info,
+            "user_added_info": self._user_added_device_info,
+            "flavor": self.flavor,
         }
         return info
 
@@ -571,23 +585,19 @@
         if self._sdk_api_level is not None:
             return self._sdk_api_level
         if self.is_bootloader:
-            self.log.error(
-                'Device is in fastboot mode. Cannot get build info.')
+            self.log.error("Device is in fastboot mode. Cannot get build info.")
             return
-        self._sdk_api_level = int(
-            self.adb.shell('getprop ro.build.version.sdk'))
+        self._sdk_api_level = int(self.adb.shell("getprop ro.build.version.sdk"))
         return self._sdk_api_level
 
     @property
     def is_bootloader(self):
-        """True if the device is in bootloader mode.
-        """
+        """True if the device is in bootloader mode."""
         return self.serial in list_fastboot_devices()
 
     @property
     def is_adb_root(self):
-        """True if adb is running as root for this device.
-        """
+        """True if adb is running as root for this device."""
         try:
             return "0" == self.adb.shell("id -u")
         except AdbError:
@@ -603,9 +613,9 @@
             out = self.fastboot.getvar("product").strip()
             # "out" is never empty because of the "total time" message fastboot
             # writes to stderr.
-            lines = out.split('\n', 1)
+            lines = out.split("\n", 1)
             if lines:
-                tokens = lines[0].split(' ')
+                tokens = lines[0].split(" ")
                 if len(tokens) > 1:
                     return tokens[1].lower()
             return None
@@ -634,8 +644,7 @@
         """Returns the event dispatcher of the first Sl4aSession created."""
         if len(self._sl4a_manager.sessions) > 0:
             session_id = sorted(self._sl4a_manager.sessions.keys())[0]
-            return self._sl4a_manager.sessions[
-                session_id].get_event_dispatcher()
+            return self._sl4a_manager.sessions[session_id].get_event_dispatcher()
         else:
             return None
 
@@ -646,8 +655,7 @@
 
     @property
     def is_adb_logcat_on(self):
-        """Whether there is an ongoing adb logcat collection.
-        """
+        """Whether there is an ongoing adb logcat collection."""
         if self.adb_logcat_process:
             if self.adb_logcat_process.is_running():
                 return True
@@ -656,7 +664,7 @@
                 # if logcat died due to device reboot and sl4a session has
                 # not restarted there is no droid.
                 if self.droid:
-                    self.droid.logI('Logcat died')
+                    self.droid.logI("Logcat died")
                 self.log.info("Logcat to %s died", self.log_path)
                 return False
         return False
@@ -686,9 +694,9 @@
             # skip_sl4a value can be reset from config file
             if hasattr(self, k) and k != "skip_sl4a":
                 raise errors.AndroidDeviceError(
-                    "Attempting to set existing attribute %s on %s" %
-                    (k, self.serial),
-                    serial=self.serial)
+                    "Attempting to set existing attribute %s on %s" % (k, self.serial),
+                    serial=self.serial,
+                )
             setattr(self, k, v)
 
     def root_adb(self):
@@ -702,7 +710,7 @@
 
         for attempt in range(ADB_ROOT_RETRY_COUNT):
             try:
-                self.log.debug('Enabling ADB root mode: attempt %d.' % attempt)
+                self.log.debug("Enabling ADB root mode: attempt %d." % attempt)
                 self.adb.root()
             except AdbError:
                 if attempt == ADB_ROOT_RETRY_COUNT:
@@ -737,12 +745,15 @@
             >>> droid, ed = ad.get_droid()
         """
         self.log.debug(
-            "Creating RPC client_port={}, forwarded_port={}, server_port={}".
-            format(self.client_port, self.forwarded_port, self.server_port))
+            "Creating RPC client_port={}, forwarded_port={}, server_port={}".format(
+                self.client_port, self.forwarded_port, self.server_port
+            )
+        )
         session = self._sl4a_manager.create_session(
             client_port=self.client_port,
             forwarded_port=self.forwarded_port,
-            server_port=self.server_port)
+            server_port=self.server_port,
+        )
         droid = session.rpc_client
         if handle_event:
             ed = session.get_event_dispatcher()
@@ -762,24 +773,31 @@
         """
         for cmd in ("ps -A", "ps"):
             try:
-                out = self.adb.shell('%s | grep "S %s"' % (cmd, package_name),
-                                     ignore_status=True)
+                out = self.adb.shell(
+                    '%s | grep "S %s"' % (cmd, package_name), ignore_status=True
+                )
                 if package_name not in out:
                     continue
                 try:
                     pid = int(out.split()[1])
-                    self.log.info('apk %s has pid %s.', package_name, pid)
+                    self.log.info("apk %s has pid %s.", package_name, pid)
                     return pid
                 except (IndexError, ValueError) as e:
                     # Possible ValueError from string to int cast.
                     # Possible IndexError from split.
                     self.log.warning(
-                        'Command \"%s\" returned output line: '
-                        '\"%s\".\nError: %s', cmd, out, e)
+                        'Command "%s" returned output line: ' '"%s".\nError: %s',
+                        cmd,
+                        out,
+                        e,
+                    )
             except Exception as e:
                 self.log.warning(
-                    'Device fails to check if %s running with \"%s\"\n'
-                    'Exception %s', package_name, cmd, e)
+                    'Device fails to check if %s running with "%s"\n' "Exception %s",
+                    package_name,
+                    cmd,
+                    e,
+                )
         self.log.debug("apk %s is not running", package_name)
         return None
 
@@ -795,17 +813,11 @@
         return self._sl4a_manager.sessions[droid.uid].get_event_dispatcher()
 
     def _is_timestamp_in_range(self, target, log_begin_time, log_end_time):
-        low = acts_logger.logline_timestamp_comparator(log_begin_time,
-                                                       target) <= 0
-        high = acts_logger.logline_timestamp_comparator(log_end_time,
-                                                        target) >= 0
+        low = acts_logger.logline_timestamp_comparator(log_begin_time, target) <= 0
+        high = acts_logger.logline_timestamp_comparator(log_end_time, target) >= 0
         return low and high
 
-    def cat_adb_log(self,
-                    tag,
-                    begin_time,
-                    end_time=None,
-                    dest_path="AdbLogExcerpts"):
+    def cat_adb_log(self, tag, begin_time, end_time=None, dest_path="AdbLogExcerpts"):
         """Takes an excerpt of the adb logcat log from a certain time point to
         current time.
 
@@ -821,21 +833,24 @@
         else:
             log_end_time = acts_logger.epoch_to_log_line_timestamp(end_time)
         self.log.debug("Extracting adb log from logcat.")
-        logcat_path = os.path.join(self.device_log_path,
-                                   'adblog_%s_debug.txt' % self.serial)
+        logcat_path = os.path.join(
+            self.device_log_path, "adblog_%s_debug.txt" % self.serial
+        )
         if not os.path.exists(logcat_path):
             self.log.warning("Logcat file %s does not exist." % logcat_path)
             return
         adb_excerpt_dir = os.path.join(self.log_path, dest_path)
         os.makedirs(adb_excerpt_dir, exist_ok=True)
-        out_name = '%s,%s.txt' % (acts_logger.normalize_log_line_timestamp(
-            log_begin_time), self.serial)
+        out_name = "%s,%s.txt" % (
+            acts_logger.normalize_log_line_timestamp(log_begin_time),
+            self.serial,
+        )
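+        # Truncate the tag so the excerpt file name stays within
+        # utils.MAX_FILENAME_LEN.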
         tag_len = utils.MAX_FILENAME_LEN - len(out_name)
-        out_name = '%s,%s' % (tag[:tag_len], out_name)
+        out_name = "%s,%s" % (tag[:tag_len], out_name)
         adb_excerpt_path = os.path.join(adb_excerpt_dir, out_name)
-        with open(adb_excerpt_path, 'w', encoding='utf-8') as out:
+        with open(adb_excerpt_path, "w", encoding="utf-8") as out:
             in_file = logcat_path
-            with open(in_file, 'r', encoding='utf-8', errors='replace') as f:
+            with open(in_file, "r", encoding="utf-8", errors="replace") as f:
                 while True:
                     line = None
                     try:
@@ -844,21 +859,20 @@
                             break
                     except:
                         continue
-                    line_time = line[:acts_logger.log_line_timestamp_len]
+                    line_time = line[: acts_logger.log_line_timestamp_len]
                     if not acts_logger.is_valid_logline_timestamp(line_time):
                         continue
-                    if self._is_timestamp_in_range(line_time, log_begin_time,
-                                                   log_end_time):
-                        if not line.endswith('\n'):
-                            line += '\n'
+                    if self._is_timestamp_in_range(
+                        line_time, log_begin_time, log_end_time
+                    ):
+                        if not line.endswith("\n"):
+                            line += "\n"
                         out.write(line)
         return adb_excerpt_path
 
-    def search_logcat(self,
-                      matching_string,
-                      begin_time=None,
-                      end_time=None,
-                      logcat_path=None):
+    def search_logcat(
+        self, matching_string, begin_time=None, end_time=None, logcat_path=None
+    ):
         """Search logcat message with given string.
 
         Args:
@@ -887,29 +901,27 @@
               "message_id": "0853"}]
         """
         if not logcat_path:
-            logcat_path = os.path.join(self.device_log_path,
-                                       'adblog_%s_debug.txt' % self.serial)
+            logcat_path = os.path.join(
+                self.device_log_path, "adblog_%s_debug.txt" % self.serial
+            )
         if not os.path.exists(logcat_path):
             self.log.warning("Logcat file %s does not exist." % logcat_path)
             return
-        output = job.run("grep '%s' %s" % (matching_string, logcat_path),
-                         ignore_status=True)
+        output = job.run(
+            "grep '%s' %s" % (matching_string, logcat_path), ignore_status=True
+        )
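+        # grep exits non-zero when nothing matches; treat that as no results.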
         if not output.stdout or output.exit_status != 0:
             return []
         if begin_time:
             if not isinstance(begin_time, datetime):
-                log_begin_time = acts_logger.epoch_to_log_line_timestamp(
-                    begin_time)
-                begin_time = datetime.strptime(log_begin_time,
-                                               "%Y-%m-%d %H:%M:%S.%f")
+                log_begin_time = acts_logger.epoch_to_log_line_timestamp(begin_time)
+                begin_time = datetime.strptime(log_begin_time, "%Y-%m-%d %H:%M:%S.%f")
         if end_time:
             if not isinstance(end_time, datetime):
-                log_end_time = acts_logger.epoch_to_log_line_timestamp(
-                    end_time)
-                end_time = datetime.strptime(log_end_time,
-                                             "%Y-%m-%d %H:%M:%S.%f")
+                log_end_time = acts_logger.epoch_to_log_line_timestamp(end_time)
+                end_time = datetime.strptime(log_end_time, "%Y-%m-%d %H:%M:%S.%f")
         result = []
-        logs = re.findall(r'(\S+\s\S+)(.*)', output.stdout)
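+        # Each match captures the "<date> <time>" stamp and the rest of the line.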
+        logs = re.findall(r"(\S+\s\S+)(.*)", output.stdout)
         for log in logs:
             time_stamp = log[0]
             time_obj = datetime.strptime(time_stamp, "%Y-%m-%d %H:%M:%S.%f")
@@ -920,18 +932,20 @@
             if end_time and time_obj > end_time:
                 continue
 
-            res = re.findall(r'.*\[(\d+)\]', log[1])
+            res = re.findall(r".*\[(\d+)\]", log[1])
             try:
                 message_id = res[0]
             except:
                 message_id = None
 
-            result.append({
-                "log_message": "".join(log),
-                "time_stamp": time_stamp,
-                "datetime_obj": time_obj,
-                "message_id": message_id
-            })
+            result.append(
+                {
+                    "log_message": "".join(log),
+                    "time_stamp": time_stamp,
+                    "datetime_obj": time_obj,
+                    "message_id": message_id,
+                }
+            )
         return result
 
     def start_adb_logcat(self):
@@ -940,29 +954,30 @@
         """
         if self.is_adb_logcat_on:
             self.log.warning(
-                'Android device %s already has a running adb logcat thread. ' %
-                self.serial)
+                "Android device %s already has a running adb logcat thread. "
+                % self.serial
+            )
             return
         # Disable adb log spam filter. Have to stop and clear settings first
         # because 'start' doesn't support --clear option before Android N.
         self.adb.shell("logpersist.stop --clear", ignore_status=True)
         self.adb.shell("logpersist.start", ignore_status=True)
-        if hasattr(self, 'adb_logcat_param'):
+        if hasattr(self, "adb_logcat_param"):
             extra_params = self.adb_logcat_param
         else:
             extra_params = "-b all"
 
         self.adb_logcat_process = logcat.create_logcat_keepalive_process(
-            self.serial, self.log_dir, extra_params)
+            self.serial, self.log_dir, extra_params
+        )
         self.adb_logcat_process.start()
 
     def stop_adb_logcat(self):
-        """Stops the adb logcat collection subprocess.
-        """
+        """Stops the adb logcat collection subprocess."""
         if not self.is_adb_logcat_on:
             self.log.warning(
-                'Android device %s does not have an ongoing adb logcat ' %
-                self.serial)
+                "Android device %s does not have an ongoing adb logcat " % self.serial
+            )
             return
         # Set the last timestamp to the current timestamp. This may cause
         # a race condition that allows the same line to be logged twice,
@@ -979,8 +994,9 @@
         Returns:
         Linux UID for the apk.
         """
-        output = self.adb.shell("dumpsys package %s | grep userId=" % apk_name,
-                                ignore_status=True)
+        output = self.adb.shell(
+            "dumpsys package %s | grep userId=" % apk_name, ignore_status=True
+        )
         result = re.search(r"userId=(\d+)", output)
         if result:
             return result.group(1)
@@ -997,15 +1013,17 @@
             Version of the given apk.
         """
         try:
-            output = self.adb.shell("dumpsys package %s | grep versionName" %
-                                    package_name)
+            output = self.adb.shell(
+                "dumpsys package %s | grep versionName" % package_name
+            )
             pattern = re.compile(r"versionName=(.+)", re.I)
             result = pattern.findall(output)
             if result:
                 return result[0]
         except Exception as e:
-            self.log.warning("Fail to get the version of package %s: %s",
-                             package_name, e)
+            self.log.warning(
+                "Fail to get the version of package %s: %s", package_name, e
+            )
         self.log.debug("apk %s is not found", package_name)
         return None
 
@@ -1022,13 +1040,16 @@
         try:
             return bool(
                 self.adb.shell(
-                    '(pm list packages | grep -w "package:%s") || true' %
-                    package_name))
+                    '(pm list packages | grep -w "package:%s") || true' % package_name
+                )
+            )
 
         except Exception as err:
             self.log.error(
-                'Could not determine if %s is installed. '
-                'Received error:\n%s', package_name, err)
+                "Could not determine if %s is installed. " "Received error:\n%s",
+                package_name,
+                err,
+            )
             return False
 
     def is_sl4a_installed(self):
@@ -1045,15 +1066,19 @@
         """
         for cmd in ("ps -A", "ps"):
             try:
-                out = self.adb.shell('%s | grep "S %s"' % (cmd, package_name),
-                                     ignore_status=True)
+                out = self.adb.shell(
+                    '%s | grep "S %s"' % (cmd, package_name), ignore_status=True
+                )
                 if package_name in out:
                     self.log.info("apk %s is running", package_name)
                     return True
             except Exception as e:
                 self.log.warning(
-                    "Device fails to check is %s running by %s "
-                    "Exception %s", package_name, cmd, e)
+                    "Device fails to check is %s running by %s " "Exception %s",
+                    package_name,
+                    cmd,
+                    e,
+                )
                 continue
         self.log.debug("apk %s is not running", package_name)
         return False
@@ -1071,8 +1096,7 @@
         True if package is installed. False otherwise.
         """
         try:
-            self.adb.shell('am force-stop %s' % package_name,
-                           ignore_status=True)
+            self.adb.shell("am force-stop %s" % package_name, ignore_status=True)
         except Exception as e:
             self.log.warning("Fail to stop package %s: %s", package_name, e)
 
@@ -1098,7 +1122,8 @@
         os.makedirs(br_path, exist_ok=True)
         epoch = begin_time if begin_time else utils.get_current_epoch_time()
         time_stamp = acts_logger.normalize_log_line_timestamp(
-            acts_logger.epoch_to_log_line_timestamp(epoch))
+            acts_logger.epoch_to_log_line_timestamp(epoch)
+        )
         out_name = "AndroidDevice%s_%s" % (self.serial, time_stamp)
         out_name = "%s.zip" % out_name if new_br else "%s.txt" % out_name
         full_out_path = os.path.join(br_path, out_name)
@@ -1112,25 +1137,24 @@
             out = self.adb.shell("bugreportz", timeout=BUG_REPORT_TIMEOUT)
             if not out.startswith("OK"):
                 raise errors.AndroidDeviceError(
-                    'Failed to take bugreport on %s: %s' % (self.serial, out),
-                    serial=self.serial)
-            br_out_path = out.split(':')[1].strip().split()[0]
+                    "Failed to take bugreport on %s: %s" % (self.serial, out),
+                    serial=self.serial,
+                )
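+            # bugreportz prints "OK:<path>" on success; extract the on-device path.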
+            br_out_path = out.split(":")[1].strip().split()[0]
             self.adb.pull("%s %s" % (br_out_path, full_out_path))
         else:
-            self.adb.bugreport(" > {}".format(full_out_path),
-                               timeout=BUG_REPORT_TIMEOUT)
+            self.adb.bugreport(
+                " > {}".format(full_out_path), timeout=BUG_REPORT_TIMEOUT
+            )
         if test_name:
-            self.log.info("Bugreport for %s taken at %s.", test_name,
-                          full_out_path)
+            self.log.info("Bugreport for %s taken at %s.", test_name, full_out_path)
         else:
             self.log.info("Bugreport taken at %s.", test_name, full_out_path)
         self.adb.wait_for_device(timeout=WAIT_FOR_DEVICE_TIMEOUT)
 
-    def get_file_names(self,
-                       directory,
-                       begin_time=None,
-                       skip_files=[],
-                       match_string=None):
+    def get_file_names(
+        self, directory, begin_time=None, skip_files=[], match_string=None
+    ):
         """Get files names with provided directory."""
         cmd = "find %s -type f" % directory
         if begin_time:
@@ -1142,8 +1166,12 @@
         for skip_file in skip_files:
             cmd = "%s ! -iname %s" % (cmd, skip_file)
         out = self.adb.shell(cmd, ignore_status=True)
-        if not out or "No such" in out or "Permission denied" in out or \
-            "Not a directory" in out:
+        if (
+            not out
+            or "No such" in out
+            or "Permission denied" in out
+            or "Not a directory" in out
+        ):
             return []
         files = out.split("\n")
         self.log.debug("Find files in directory %s: %s", directory, files)
@@ -1154,7 +1182,7 @@
         """
         The $EXTERNAL_STORAGE path on the device. Most commonly set to '/sdcard'
         """
-        return self.adb.shell('echo $EXTERNAL_STORAGE')
+        return self.adb.shell("echo $EXTERNAL_STORAGE")
 
     def file_exists(self, file_path):
         """Returns whether a file exists on a device.
@@ -1162,14 +1190,16 @@
         Args:
             file_path: The path of the file to check for.
         """
-        cmd = '(test -f %s && echo yes) || echo no' % file_path
+        cmd = "(test -f %s && echo yes) || echo no" % file_path
         result = self.adb.shell(cmd)
-        if result == 'yes':
+        if result == "yes":
             return True
-        elif result == 'no':
+        elif result == "no":
             return False
-        raise ValueError('Couldn\'t determine if %s exists. '
-                         'Expected yes/no, got %s' % (file_path, result[cmd]))
+        raise ValueError(
+            "Couldn't determine if %s exists. "
+            "Expected yes/no, got %s" % (file_path, result[cmd])
+        )
 
     def pull_files(self, device_paths, host_path=None):
         """Pull files from devices.
@@ -1183,39 +1213,37 @@
         if not host_path:
             host_path = self.log_path
         for device_path in device_paths:
-            self.log.info('Pull from device: %s -> %s' %
-                          (device_path, host_path))
-            self.adb.pull("%s %s" % (device_path, host_path),
-                          timeout=PULL_TIMEOUT)
+            self.log.info("Pull from device: %s -> %s" % (device_path, host_path))
+            self.adb.pull("%s %s" % (device_path, host_path), timeout=PULL_TIMEOUT)
 
-    def check_crash_report(self,
-                           test_name=None,
-                           begin_time=None,
-                           log_crash_report=False):
+    def check_crash_report(
+        self, test_name=None, begin_time=None, log_crash_report=False
+    ):
         """check crash report on the device."""
         crash_reports = []
         for crash_path in CRASH_REPORT_PATHS:
             try:
-                cmd = 'cd %s' % crash_path
+                cmd = "cd %s" % crash_path
                 self.adb.shell(cmd)
             except Exception as e:
                 self.log.debug("received exception %s", e)
                 continue
-            crashes = self.get_file_names(crash_path,
-                                          skip_files=CRASH_REPORT_SKIPS,
-                                          begin_time=begin_time)
+            crashes = self.get_file_names(
+                crash_path, skip_files=CRASH_REPORT_SKIPS, begin_time=begin_time
+            )
             if crash_path == "/data/tombstones/" and crashes:
                 tombstones = crashes[:]
                 for tombstone in tombstones:
                     if self.adb.shell(
-                            'cat %s | grep "crash_dump failed to dump process"'
-                            % tombstone):
+                        'cat %s | grep "crash_dump failed to dump process"' % tombstone
+                    ):
                         crashes.remove(tombstone)
             if crashes:
                 crash_reports.extend(crashes)
         if crash_reports and log_crash_report:
-            crash_log_path = os.path.join(self.device_log_path,
-                                          "Crashes_%s" % self.serial)
+            crash_log_path = os.path.join(
+                self.device_log_path, "Crashes_%s" % self.serial
+            )
             os.makedirs(crash_log_path, exist_ok=True)
             self.pull_files(crash_reports, crash_log_path)
         return crash_reports
@@ -1225,35 +1253,38 @@
         # Sleep 10 seconds for the buffered log to be written in qxdm log file
         time.sleep(10)
         log_path = getattr(self, "qxdm_log_path", DEFAULT_QXDM_LOG_PATH)
-        qxdm_logs = self.get_file_names(log_path,
-                                        begin_time=begin_time,
-                                        match_string="*.qmdl")
+        qxdm_logs = self.get_file_names(
+            log_path, begin_time=begin_time, match_string="*.qmdl"
+        )
         if qxdm_logs:
-            qxdm_log_path = os.path.join(self.device_log_path,
-                                         "QXDM_%s" % self.serial)
+            qxdm_log_path = os.path.join(self.device_log_path, "QXDM_%s" % self.serial)
             os.makedirs(qxdm_log_path, exist_ok=True)
 
             self.log.info("Pull QXDM Log %s to %s", qxdm_logs, qxdm_log_path)
             self.pull_files(qxdm_logs, qxdm_log_path)
 
-            self.adb.pull("/firmware/image/qdsp6m.qdb %s" % qxdm_log_path,
-                          timeout=PULL_TIMEOUT,
-                          ignore_status=True)
+            self.adb.pull(
+                "/firmware/image/qdsp6m.qdb %s" % qxdm_log_path,
+                timeout=PULL_TIMEOUT,
+                ignore_status=True,
+            )
             # Zip Folder
-            utils.zip_directory('%s.zip' % qxdm_log_path, qxdm_log_path)
+            utils.zip_directory("%s.zip" % qxdm_log_path, qxdm_log_path)
             shutil.rmtree(qxdm_log_path)
         else:
             self.log.error("Didn't find QXDM logs in %s." % log_path)
         if "Verizon" in self.adb.getprop("gsm.sim.operator.alpha"):
-            omadm_log_path = os.path.join(self.device_log_path,
-                                          "OMADM_%s" % self.serial)
+            omadm_log_path = os.path.join(
+                self.device_log_path, "OMADM_%s" % self.serial
+            )
             os.makedirs(omadm_log_path, exist_ok=True)
             self.log.info("Pull OMADM Log")
             self.adb.pull(
-                "/data/data/com.android.omadm.service/files/dm/log/ %s" %
-                omadm_log_path,
+                "/data/data/com.android.omadm.service/files/dm/log/ %s"
+                % omadm_log_path,
                 timeout=PULL_TIMEOUT,
-                ignore_status=True)
+                ignore_status=True,
+            )
 
     def get_sdm_logs(self, test_name="", begin_time=None):
         """Get sdm logs."""
@@ -1261,31 +1292,32 @@
         time.sleep(10)
         log_paths = [
             ALWAYS_ON_LOG_PATH,
-            getattr(self, "sdm_log_path", DEFAULT_SDM_LOG_PATH)
+            getattr(self, "sdm_log_path", DEFAULT_SDM_LOG_PATH),
         ]
         sdm_logs = []
         for path in log_paths:
-            sdm_logs += self.get_file_names(path,
-                                            begin_time=begin_time,
-                                            match_string="*.sdm*")
+            sdm_logs += self.get_file_names(
+                path, begin_time=begin_time, match_string="*.sdm*"
+            )
         if sdm_logs:
-            sdm_log_path = os.path.join(self.device_log_path,
-                                        "SDM_%s" % self.serial)
+            sdm_log_path = os.path.join(self.device_log_path, "SDM_%s" % self.serial)
             os.makedirs(sdm_log_path, exist_ok=True)
             self.log.info("Pull SDM Log %s to %s", sdm_logs, sdm_log_path)
             self.pull_files(sdm_logs, sdm_log_path)
         else:
             self.log.error("Didn't find SDM logs in %s." % log_paths)
         if "Verizon" in self.adb.getprop("gsm.sim.operator.alpha"):
-            omadm_log_path = os.path.join(self.device_log_path,
-                                          "OMADM_%s" % self.serial)
+            omadm_log_path = os.path.join(
+                self.device_log_path, "OMADM_%s" % self.serial
+            )
             os.makedirs(omadm_log_path, exist_ok=True)
             self.log.info("Pull OMADM Log")
             self.adb.pull(
-                "/data/data/com.android.omadm.service/files/dm/log/ %s" %
-                omadm_log_path,
+                "/data/data/com.android.omadm.service/files/dm/log/ %s"
+                % omadm_log_path,
                 timeout=PULL_TIMEOUT,
-                ignore_status=True)
+                ignore_status=True,
+            )
 
     def start_new_session(self, max_connections=None, server_port=None):
         """Start a new session in sl4a.
@@ -1301,7 +1333,8 @@
             existing uid to a new session.
         """
         session = self._sl4a_manager.create_session(
-            max_connections=max_connections, server_port=server_port)
+            max_connections=max_connections, server_port=server_port
+        )
 
         self._sl4a_manager.sessions[session.uid] = session
         return session.rpc_client
@@ -1313,11 +1346,9 @@
         """
         self._sl4a_manager.terminate_all_sessions()
 
-    def run_iperf_client_nb(self,
-                            server_host,
-                            extra_args="",
-                            timeout=IPERF_TIMEOUT,
-                            log_file_path=None):
+    def run_iperf_client_nb(
+        self, server_host, extra_args="", timeout=IPERF_TIMEOUT, log_file_path=None
+    ):
         """Start iperf client on the device asynchronously.
 
         Return status as true if iperf client start successfully.
@@ -1335,10 +1366,7 @@
             cmd += " --logfile {} &".format(log_file_path)
         self.adb.shell_nb(cmd)
 
-    def run_iperf_client(self,
-                         server_host,
-                         extra_args="",
-                         timeout=IPERF_TIMEOUT):
+    def run_iperf_client(self, server_host, extra_args="", timeout=IPERF_TIMEOUT):
         """Start iperf client on the device.
 
         Return status as true if iperf client start successfully.
@@ -1353,9 +1381,10 @@
             status: true if iperf client start successfully.
             results: results have data flow information
         """
-        out = self.adb.shell("iperf3 -c {} {}".format(server_host, extra_args),
-                             timeout=timeout)
-        clean_out = out.split('\n')
+        out = self.adb.shell(
+            "iperf3 -c {} {}".format(server_host, extra_args), timeout=timeout
+        )
+        clean_out = out.split("\n")
         if "error" in clean_out[0].lower():
             return False, clean_out
         return True, clean_out
@@ -1373,7 +1402,7 @@
             results: results have output of command
         """
         out = self.adb.shell("iperf3 -s {}".format(extra_args))
-        clean_out = out.split('\n')
+        clean_out = out.split("\n")
         if "error" in clean_out[0].lower():
             return False, clean_out
         return True, clean_out
@@ -1393,7 +1422,7 @@
         while time.time() < timeout_start + timeout:
             try:
                 completed = self.adb.getprop("sys.boot_completed")
-                if completed == '1':
+                if completed == "1":
                     self.log.debug("Device has rebooted")
                     return
             except AdbError:
@@ -1402,13 +1431,12 @@
                 pass
             time.sleep(5)
         raise errors.AndroidDeviceError(
-            'Device %s booting process timed out.' % self.serial,
-            serial=self.serial)
+            "Device %s booting process timed out." % self.serial, serial=self.serial
+        )
 
-    def reboot(self,
-               stop_at_lock_screen=False,
-               timeout=180,
-               wait_after_reboot_complete=1):
+    def reboot(
+        self, stop_at_lock_screen=False, timeout=180, wait_after_reboot_complete=1
+    ):
         """Reboots the device.
 
         Terminate all sl4a sessions, reboot the device, wait for device to
@@ -1438,16 +1466,15 @@
         while time.time() < timeout_start + timeout:
             try:
                 self.adb.get_state()
-                time.sleep(.1)
+                time.sleep(0.1)
             except AdbError:
                 # get_state will raise an error if the device is not found. We
                 # want the device to be missing to prove the device has kicked
                 # off the reboot.
                 break
-        self.wait_for_boot_completion(timeout=(timeout - time.time() +
-                                               timeout_start))
+        self.wait_for_boot_completion(timeout=(timeout - time.time() + timeout_start))
 
-        self.log.debug('Wait for a while after boot completion.')
+        self.log.debug("Wait for a while after boot completion.")
         time.sleep(wait_after_reboot_complete)
         self.root_adb()
         skip_sl4a = self.skip_sl4a
@@ -1473,19 +1500,18 @@
 
         self.start_services()
 
-    def get_ipv4_address(self, interface='wlan0', timeout=5):
+    def get_ipv4_address(self, interface="wlan0", timeout=5):
         for timer in range(0, timeout):
             try:
-                ip_string = self.adb.shell('ifconfig %s|grep inet' % interface)
+                ip_string = self.adb.shell("ifconfig %s|grep inet" % interface)
                 break
             except adb.AdbError as e:
                 if timer + 1 == timeout:
-                    self.log.warning('Unable to find IP address for %s.' %
-                                     interface)
+                    self.log.warning("Unable to find IP address for %s." % interface)
                     return None
                 else:
                     time.sleep(1)
-        result = re.search('addr:(.*) Bcast', ip_string)
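+        # Expects the "inet addr:<ip>  Bcast:<...>" form of ifconfig output.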
+        result = re.search("addr:(.*) Bcast", ip_string)
         if result != None:
             ip_address = result.group(1)
             try:
@@ -1499,16 +1525,15 @@
     def get_ipv4_gateway(self, timeout=5):
         for timer in range(0, timeout):
             try:
-                gateway_string = self.adb.shell(
-                    'dumpsys wifi | grep mDhcpResults')
+                gateway_string = self.adb.shell("dumpsys wifi | grep mDhcpResults")
                 break
             except adb.AdbError as e:
                 if timer + 1 == timeout:
-                    self.log.warning('Unable to find gateway')
+                    self.log.warning("Unable to find gateway")
                     return None
                 else:
                     time.sleep(1)
-        result = re.search('Gateway (.*) DNS servers', gateway_string)
+        result = re.search("Gateway (.*) DNS servers", gateway_string)
         if result != None:
             ipv4_gateway = result.group(1)
             try:
@@ -1525,28 +1550,33 @@
     def get_my_current_focus_window(self):
         """Get the current focus window on screen"""
         output = self.adb.shell(
-            'dumpsys window displays | grep -E mCurrentFocus | grep -v null',
-            ignore_status=True)
+            "dumpsys window displays | grep -E mCurrentFocus | grep -v null",
+            ignore_status=True,
+        )
         if not output or "not found" in output or "Can't find" in output:
-            result = ''
+            result = ""
         else:
-            result = output.split(' ')[-1].strip("}")
+            result = output.split(" ")[-1].strip("}")
         self.log.debug("Current focus window is %s", result)
         return result
 
     def get_my_current_focus_app(self):
         """Get the current focus application"""
         dumpsys_cmd = [
-            'dumpsys window | grep -E mFocusedApp',
-            'dumpsys window displays | grep -E mFocusedApp'
+            "dumpsys window | grep -E mFocusedApp",
+            "dumpsys window displays | grep -E mFocusedApp",
         ]
         for cmd in dumpsys_cmd:
             output = self.adb.shell(cmd, ignore_status=True)
-            if not output or "not found" in output or "Can't find" in output or (
-                    "mFocusedApp=null" in output):
-                result = ''
+            if (
+                not output
+                or "not found" in output
+                or "Can't find" in output
+                or ("mFocusedApp=null" in output)
+            ):
+                result = ""
             else:
-                result = output.split(' ')[-2]
+                result = output.split(" ")[-2]
                 break
         self.log.debug("Current focus app is %s", result)
         return result
@@ -1557,18 +1587,16 @@
             return window_name in current_window
         return current_window and ENCRYPTION_WINDOW not in current_window
 
-    def wait_for_window_ready(self,
-                              window_name=None,
-                              check_interval=5,
-                              check_duration=60):
+    def wait_for_window_ready(
+        self, window_name=None, check_interval=5, check_duration=60
+    ):
         elapsed_time = 0
         while elapsed_time < check_duration:
             if self.is_window_ready(window_name=window_name):
                 return True
             time.sleep(check_interval)
             elapsed_time += check_interval
-        self.log.info("Current focus window is %s",
-                      self.get_my_current_focus_window())
+        self.log.info("Current focus window is %s", self.get_my_current_focus_window())
         return False
 
     def is_user_setup_complete(self):
@@ -1592,7 +1620,7 @@
 
     def is_screen_lock_enabled(self):
         """Check if screen lock is enabled"""
-        cmd = ("dumpsys window policy | grep showing=")
+        cmd = "dumpsys window policy | grep showing="
         out = self.adb.shell(cmd, ignore_status=True)
         return "true" in out
 
@@ -1604,7 +1632,8 @@
             self.log.info("Device is in CrpytKeeper window")
             return True
         if "StatusBar" in current_window and (
-            (not current_app) or "FallbackHome" in current_app):
+            (not current_app) or "FallbackHome" in current_app
+        ):
             self.log.info("Device is locked")
             return True
         return False
@@ -1618,8 +1647,10 @@
                 if self.is_waiting_for_unlock_pin():
                     self.unlock_screen(password=DEFAULT_DEVICE_PASSWORD)
                     time.sleep(1)
-                if not self.is_waiting_for_unlock_pin(
-                ) and self.wait_for_window_ready():
+                if (
+                    not self.is_waiting_for_unlock_pin()
+                    and self.wait_for_window_ready()
+                ):
                     return True
             return False
         else:
@@ -1671,16 +1702,19 @@
 
     def exit_setup_wizard(self):
         # Handling Android TV's setupwizard is ignored for now.
-        if 'feature:android.hardware.type.television' in self.adb.shell(
-                'pm list features'):
+        if "feature:android.hardware.type.television" in self.adb.shell(
+            "pm list features"
+        ):
             return
         if not self.is_user_setup_complete() or self.is_setupwizard_on():
             # b/116709539 need this to prevent reboot after skip setup wizard
-            self.adb.shell("am start -a com.android.setupwizard.EXIT",
-                           ignore_status=True)
-            self.adb.shell("pm disable %s" %
-                           self.get_setupwizard_package_name(),
-                           ignore_status=True)
+            self.adb.shell(
+                "am start -a com.android.setupwizard.EXIT", ignore_status=True
+            )
+            self.adb.shell(
+                "pm disable %s" % self.get_setupwizard_package_name(),
+                ignore_status=True,
+            )
         # Wait up to 5 seconds for user_setup_complete to be updated
         end_time = time.time() + 5
         while time.time() < end_time:
@@ -1705,9 +1739,11 @@
         android_package_name = "com.google.android"
         package = self.adb.shell(
             "pm list packages -f | grep -E {} | grep {}".format(
-                packages_to_skip, android_package_name))
-        wizard_package = package.split('=')[1]
-        activity = package.split('=')[0].split('/')[-2]
+                packages_to_skip, android_package_name
+            )
+        )
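+        # "pm list packages -f" output lines have the form
+        # "package:<path to apk>=<package name>".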
+        wizard_package = package.split("=")[1]
+        activity = package.split("=")[0].split("/")[-2]
         self.log.info("%s/.%sActivity" % (wizard_package, activity))
         return "%s/.%sActivity" % (wizard_package, activity)
 
@@ -1728,16 +1764,25 @@
         try:
             self.ensure_verity_disabled()
             self.adb.remount()
-            out = self.adb.push('%s %s' % (src_file_path, dst_file_path),
-                                timeout=push_timeout)
-            if 'error' in out:
-                self.log.error('Unable to push system file %s to %s due to %s',
-                               src_file_path, dst_file_path, out)
+            out = self.adb.push(
+                "%s %s" % (src_file_path, dst_file_path), timeout=push_timeout
+            )
+            if "error" in out:
+                self.log.error(
+                    "Unable to push system file %s to %s due to %s",
+                    src_file_path,
+                    dst_file_path,
+                    out,
+                )
                 return False
             return True
         except Exception as e:
-            self.log.error('Unable to push system file %s to %s due to %s',
-                           src_file_path, dst_file_path, e)
+            self.log.error(
+                "Unable to push system file %s to %s due to %s",
+                src_file_path,
+                dst_file_path,
+                e,
+            )
             return False
 
     def ensure_verity_enabled(self):
@@ -1748,8 +1793,8 @@
         """
         user = self.adb.get_user_id()
         # The below properties will only exist if verity has been enabled.
-        system_verity = self.adb.getprop('partition.system.verified')
-        vendor_verity = self.adb.getprop('partition.vendor.verified')
+        system_verity = self.adb.getprop("partition.system.verified")
+        vendor_verity = self.adb.getprop("partition.vendor.verified")
         if not system_verity or not vendor_verity:
             self.adb.ensure_root()
             self.adb.enable_verity()
@@ -1763,8 +1808,8 @@
         """
         user = self.adb.get_user_id()
         # The below properties will only exist if verity has been enabled.
-        system_verity = self.adb.getprop('partition.system.verified')
-        vendor_verity = self.adb.getprop('partition.vendor.verified')
+        system_verity = self.adb.getprop("partition.system.verified")
+        vendor_verity = self.adb.getprop("partition.vendor.verified")
         if system_verity or vendor_verity:
             self.adb.ensure_root()
             self.adb.disable_verity()
diff --git a/src/antlion/controllers/android_lib/android_api.py b/src/antlion/controllers/android_lib/android_api.py
deleted file mode 100644
index d58fe46..0000000
--- a/src/antlion/controllers/android_lib/android_api.py
+++ /dev/null
@@ -1,66 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import logging
-import sys
-
-from antlion.controllers.android_device import AndroidDevice
-from antlion.libs import version_selector
-
-
-class AndroidApi:
-    OLDEST = 0
-    MINIMUM = 0
-    L = 21
-    L_MR1 = 22
-    M = 23
-    N = 24
-    N_MR1 = 25
-    O = 26
-    O_MR1 = 27
-    P = 28
-    LATEST = sys.maxsize
-    MAX = sys.maxsize
-
-
-def android_api(min_api=AndroidApi.OLDEST, max_api=AndroidApi.LATEST):
-    """Decorates a function to only be called for the given API range.
-
-    Only gets called if the AndroidDevice in the args is within the specified
-    API range. Otherwise, a different function may be called instead. If the
-    API level is out of range, and no other function handles that API level, an
-    error is raise instead.
-
-    Note: In Python3.5 and below, the order of kwargs is not preserved. If your
-          function contains multiple AndroidDevices within the kwargs, and no
-          AndroidDevices within args, you are NOT guaranteed the first
-          AndroidDevice is the same one chosen each time the function runs. Due
-          to this, we do not check for AndroidDevices in kwargs.
-
-    Args:
-         min_api: The minimum API level. Can be an int or an AndroidApi value.
-         max_api: The maximum API level. Can be an int or an AndroidApi value.
-    """
-
-    def get_api_level(*args, **_):
-        for arg in args:
-            if isinstance(arg, AndroidDevice):
-                return arg.sdk_api_level()
-        logging.getLogger().error(
-            'An AndroidDevice was not found in the given '
-            'arguments.')
-        return None
-
-    return version_selector.set_version(get_api_level, min_api, max_api)
diff --git a/src/antlion/controllers/android_lib/logcat.py b/src/antlion/controllers/android_lib/logcat.py
index 4e2c4fd..0a5e8f7 100644
--- a/src/antlion/controllers/android_lib/logcat.py
+++ b/src/antlion/controllers/android_lib/logcat.py
@@ -21,7 +21,7 @@
 from antlion.libs.logging import log_stream
 from antlion.libs.logging.log_stream import LogStyles
 
-TIMESTAMP_REGEX = r'((?:\d+-)?\d+-\d+ \d+:\d+:\d+.\d+)'
+TIMESTAMP_REGEX = r"((?:\d+-)?\d+-\d+ \d+:\d+:\d+.\d+)"
 
 
 class TimestampTracker(object):
@@ -43,17 +43,17 @@
 
 def _get_log_level(message):
     """Returns the log level for the given message."""
-    if message.startswith('-') or len(message) < 37:
+    if message.startswith("-") or len(message) < 37:
         return logging.ERROR
     else:
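+        # The "-v year" logcat format places the priority character
+        # (V/D/I/W/E) at a fixed column.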
         log_level = message[36]
-        if log_level in ('V', 'D'):
+        if log_level in ("V", "D"):
             return logging.DEBUG
-        elif log_level == 'I':
+        elif log_level == "I":
             return logging.INFO
-        elif log_level == 'W':
+        elif log_level == "W":
             return logging.WARNING
-        elif log_level == 'E':
+        elif log_level == "E":
             return logging.ERROR
     return logging.NOTSET
 
@@ -71,15 +71,18 @@
 def _on_retry(serial, extra_params, timestamp_tracker):
     def on_retry(_):
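+        # Resume from the last timestamp that was logged so that log lines
+        # are not lost across reconnects.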
         begin_at = '"%s"' % (timestamp_tracker.last_timestamp or 1)
-        additional_params = extra_params or ''
+        additional_params = extra_params or ""
 
-        return 'adb -s %s logcat -T %s -v year %s' % (
-            serial, begin_at, additional_params)
+        return "adb -s %s logcat -T %s -v year %s" % (
+            serial,
+            begin_at,
+            additional_params,
+        )
 
     return on_retry
 
 
-def create_logcat_keepalive_process(serial, logcat_dir, extra_params=''):
+def create_logcat_keepalive_process(serial, logcat_dir, extra_params=""):
     """Creates a Logcat Process that automatically attempts to reconnect.
 
     Args:
@@ -91,12 +94,15 @@
         A acts.libs.proc.process.Process object.
     """
     logger = log_stream.create_logger(
-        'adblog_%s' % serial, log_name=serial, subcontext=logcat_dir,
-        log_styles=(LogStyles.LOG_DEBUG | LogStyles.TESTCASE_LOG))
-    process = Process('adb -s %s logcat -T 1 -v year %s' %
-                      (serial, extra_params))
+        "adblog_%s" % serial,
+        log_name=serial,
+        subcontext=logcat_dir,
+        log_styles=(LogStyles.LOG_DEBUG | LogStyles.TESTCASE_LOG),
+    )
+    process = Process("adb -s %s logcat -T 1 -v year %s" % (serial, extra_params))
     timestamp_tracker = TimestampTracker()
     process.set_on_output_callback(_log_line_func(logger, timestamp_tracker))
     process.set_on_terminate_callback(
-        _on_retry(serial, extra_params, timestamp_tracker))
+        _on_retry(serial, extra_params, timestamp_tracker)
+    )
     return process
diff --git a/src/antlion/controllers/android_lib/services.py b/src/antlion/controllers/android_lib/services.py
index 6c5f334..098f524 100644
--- a/src/antlion/controllers/android_lib/services.py
+++ b/src/antlion/controllers/android_lib/services.py
@@ -14,7 +14,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-
 from antlion.controllers.android_lib import errors
 from antlion.controllers.android_lib import events as android_events
 from antlion.event import event_bus
@@ -44,14 +43,22 @@
         """Registers the _start and _stop methods to their corresponding
         events.
         """
+
         def check_serial(event):
             return self.serial == event.ad.serial
 
         self._registration_ids = [
-            event_bus.register(android_events.AndroidStartServicesEvent,
-                               self._start, filter_fn=check_serial),
-            event_bus.register(android_events.AndroidStopServicesEvent,
-                               self._stop, filter_fn=check_serial)]
+            event_bus.register(
+                android_events.AndroidStartServicesEvent,
+                self._start,
+                filter_fn=check_serial,
+            ),
+            event_bus.register(
+                android_events.AndroidStopServicesEvent,
+                self._stop,
+                filter_fn=check_serial,
+            ),
+        ]
 
     def unregister(self):
         """Unregisters all subscriptions in this service."""
@@ -93,14 +100,15 @@
             return
 
         if not self.ad.is_sl4a_installed():
-            self.ad.log.error('sl4a.apk is not installed')
+            self.ad.log.error("sl4a.apk is not installed")
             raise errors.AndroidDeviceError(
-                'The required sl4a.apk is not installed',
-                serial=self.serial)
+                "The required sl4a.apk is not installed", serial=self.serial
+            )
         if not self.ad.ensure_screen_on():
             self.ad.log.error("User window cannot come up")
             raise errors.AndroidDeviceError(
-                "User window cannot come up", serial=self.serial)
+                "User window cannot come up", serial=self.serial
+            )
 
         droid, ed = self.ad.get_droid()
         ed.start()
diff --git a/src/antlion/controllers/android_lib/tel/tel_utils.py b/src/antlion/controllers/android_lib/tel/tel_utils.py
index 0be8ef5..c18741c 100644
--- a/src/antlion/controllers/android_lib/tel/tel_utils.py
+++ b/src/antlion/controllers/android_lib/tel/tel_utils.py
@@ -13,7 +13,6 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
 """Generic telephony utility functions. Cloned from test_utils.tel."""
 
 import re
@@ -89,21 +88,27 @@
 
 
 def dumpsys_last_call_info(ad):
-    """ Get call information by dumpsys telecom. """
+    """Get call information by dumpsys telecom."""
     num = dumpsys_last_call_number(ad)
     output = ad.adb.shell("dumpsys telecom")
     result = re.search(r"Call TC@%s: {(.*?)}" % num, output, re.DOTALL)
     call_info = {"TC": num}
     if result:
         result = result.group(1)
-        for attr in ("startTime", "endTime", "direction", "isInterrupted",
-                     "callTechnologies", "callTerminationsReason",
-                     "isVideoCall", "callProperties"):
+        for attr in (
+            "startTime",
+            "endTime",
+            "direction",
+            "isInterrupted",
+            "callTechnologies",
+            "callTerminationsReason",
+            "isVideoCall",
+            "callProperties",
+        ):
             match = re.search(r"%s: (.*)" % attr, result)
             if match:
                 if attr in ("startTime", "endTime"):
-                    call_info[attr] = epoch_to_log_line_timestamp(
-                        int(match.group(1)))
+                    call_info[attr] = epoch_to_log_line_timestamp(int(match.group(1)))
                 else:
                     call_info[attr] = match.group(1)
     ad.log.debug("call_info = %s", call_info)
@@ -124,8 +129,7 @@
 
 
 def get_outgoing_voice_sub_id(ad):
-    """ Get outgoing voice subscription id
-    """
+    """Get outgoing voice subscription id"""
     if hasattr(ad, "outgoing_voice_sub_id"):
         return ad.outgoing_voice_sub_id
     else:
@@ -133,7 +137,7 @@
 
 
 def get_rx_tx_power_levels(log, ad):
-    """ Obtains Rx and Tx power levels from the MDS application.
+    """Obtains Rx and Tx power levels from the MDS application.
 
     The method requires the MDS app to be installed in the DUT.
 
@@ -146,36 +150,40 @@
         in Rx chain, and the second element is the transmitted power in dBm.
         Values for invalid Rx / Tx chains are set to None.
     """
-    cmd = ('am instrument -w -e request "80 00 e8 03 00 08 00 00 00" -e '
-           'response wait "com.google.mdstest/com.google.mdstest.instrument.'
-           'ModemCommandInstrumentation"')
+    cmd = (
+        'am instrument -w -e request "80 00 e8 03 00 08 00 00 00" -e '
+        'response wait "com.google.mdstest/com.google.mdstest.instrument.'
+        'ModemCommandInstrumentation"'
+    )
     try:
         output = ad.adb.shell(cmd)
     except AdbCommandError as e:
         log.error(e)
         output = None
 
-    if not output or 'result=SUCCESS' not in output:
-        raise RuntimeError('Could not obtain Tx/Rx power levels from MDS. Is '
-                           'the MDS app installed?')
+    if not output or "result=SUCCESS" not in output:
+        raise RuntimeError(
+            "Could not obtain Tx/Rx power levels from MDS. Is " "the MDS app installed?"
+        )
 
     response = re.search(r"(?<=response=).+", output)
 
     if not response:
-        raise RuntimeError('Invalid response from the MDS app:\n' + output)
+        raise RuntimeError("Invalid response from the MDS app:\n" + output)
 
     # Obtain a list of bytes in hex format from the response string
-    response_hex = response.group(0).split(' ')
+    response_hex = response.group(0).split(" ")
 
     def get_bool(pos):
-        """ Obtain a boolean variable from the byte array. """
-        return response_hex[pos] == '01'
+        """Obtain a boolean variable from the byte array."""
+        return response_hex[pos] == "01"
 
     def get_int32(pos):
-        """ Obtain an int from the byte array. Bytes are printed in
+        """Obtain an int from the byte array. Bytes are printed in
         little endian format."""
         return struct.unpack(
-            '<i', bytearray.fromhex(''.join(response_hex[pos:pos + 4])))[0]
+            "<i", bytearray.fromhex("".join(response_hex[pos : pos + 4]))
+        )[0]
 
     rx_power = []
     RX_CHAINS = 4
@@ -204,7 +212,7 @@
 
 
 def get_telephony_signal_strength(ad):
-    #{'evdoEcio': -1, 'asuLevel': 28, 'lteSignalStrength': 14, 'gsmLevel': 0,
+    # {'evdoEcio': -1, 'asuLevel': 28, 'lteSignalStrength': 14, 'gsmLevel': 0,
     # 'cdmaAsuLevel': 99, 'evdoDbm': -120, 'gsmDbm': -1, 'cdmaEcio': -160,
     # 'level': 2, 'lteLevel': 2, 'cdmaDbm': -120, 'dbm': -112, 'cdmaLevel': 0,
     # 'lteAsuLevel': 28, 'gsmAsuLevel': 99, 'gsmBitErrorRate': 0,
@@ -219,12 +227,14 @@
     return signal_strength
 
 
-def initiate_call(log,
-                  ad,
-                  callee_number,
-                  emergency=False,
-                  incall_ui_display=INCALL_UI_DISPLAY_FOREGROUND,
-                  video=False):
+def initiate_call(
+    log,
+    ad,
+    callee_number,
+    emergency=False,
+    incall_ui_display=INCALL_UI_DISPLAY_FOREGROUND,
+    video=False,
+):
     """Make phone call from caller to callee.
 
     Args:
@@ -253,7 +263,8 @@
 
         # Verify OFFHOOK state
         if not wait_for_call_offhook_for_subscription(
-                log, ad, sub_id, event_tracking_started=True):
+            log, ad, sub_id, event_tracking_started=True
+        ):
             ad.log.info("sub_id %s not in call offhook state", sub_id)
             last_call_drop_reason(ad, begin_time=begin_time)
             return False
@@ -299,7 +310,7 @@
         False otherwise.
     """
     try:
-        value_in_event = event['data'][field]
+        value_in_event = event["data"][field]
     except KeyError:
         return False
     for value in value_list:
@@ -319,12 +330,14 @@
         return ad.droid.telecomIsInCall()
     except:
         return "mCallState=2" in ad.adb.shell(
-            "dumpsys telephony.registry | grep mCallState")
+            "dumpsys telephony.registry | grep mCallState"
+        )
 
 
 def last_call_drop_reason(ad, begin_time=None):
     reasons = ad.search_logcat(
-        "qcril_qmi_voice_map_qmi_to_ril_last_call_failure_cause", begin_time)
+        "qcril_qmi_voice_map_qmi_to_ril_last_call_failure_cause", begin_time
+    )
     reason_string = ""
     if reasons:
         log_msg = "Logcat call drop reasons:"
@@ -333,17 +346,15 @@
             if "ril reason str" in reason["log_message"]:
                 reason_string = reason["log_message"].split(":")[-1].strip()
         ad.log.info(log_msg)
-    reasons = ad.search_logcat("ACTION_FORBIDDEN_NO_SERVICE_AUTHORIZATION",
-                               begin_time)
+    reasons = ad.search_logcat("ACTION_FORBIDDEN_NO_SERVICE_AUTHORIZATION", begin_time)
     if reasons:
         ad.log.warning("ACTION_FORBIDDEN_NO_SERVICE_AUTHORIZATION is seen")
-    ad.log.info("last call dumpsys: %s",
-                sorted(dumpsys_last_call_info(ad).items()))
+    ad.log.info("last call dumpsys: %s", sorted(dumpsys_last_call_info(ad).items()))
     return reason_string
 
 
 def toggle_airplane_mode(log, ad, new_state=None, strict_checking=True):
-    """ Toggle the state of airplane mode.
+    """Toggle the state of airplane mode.
 
     Args:
         log: log handler.
@@ -359,11 +370,12 @@
         return toggle_airplane_mode_by_adb(log, ad, new_state)
     else:
         return toggle_airplane_mode_msim(
-            log, ad, new_state, strict_checking=strict_checking)
+            log, ad, new_state, strict_checking=strict_checking
+        )
 
 
 def toggle_airplane_mode_by_adb(log, ad, new_state=None):
-    """ Toggle the state of airplane mode.
+    """Toggle the state of airplane mode.
 
     Args:
         log: log handler.
@@ -392,7 +404,7 @@
 
 
 def toggle_airplane_mode_msim(log, ad, new_state=None, strict_checking=True):
-    """ Toggle the state of airplane mode.
+    """Toggle the state of airplane mode.
 
     Args:
         log: log handler.
@@ -411,13 +423,12 @@
         return True
     elif new_state is None:
         new_state = not cur_state
-        ad.log.info("Toggle APM mode, from current tate %s to %s", cur_state,
-                    new_state)
+        ad.log.info("Toggle APM mode, from current tate %s to %s", cur_state, new_state)
     sub_id_list = []
     active_sub_info = ad.droid.subscriptionGetAllSubInfoList()
     if active_sub_info:
         for info in active_sub_info:
-            sub_id_list.append(info['subscriptionId'])
+            sub_id_list.append(info["subscriptionId"])
 
     ad.ed.clear_all_events()
     time.sleep(0.1)
@@ -436,8 +447,7 @@
         ad.log.info("Turn off airplane mode")
 
     for sub_id in sub_id_list:
-        ad.droid.telephonyStartTrackingServiceStateChangeForSubscription(
-            sub_id)
+        ad.droid.telephonyStartTrackingServiceStateChangeForSubscription(sub_id)
 
     timeout_time = time.time() + MAX_WAIT_TIME_AIRPLANEMODE_EVENT
     ad.droid.connectivityToggleAirplaneMode(new_state)
@@ -449,35 +459,39 @@
                 is_event_match_for_list,
                 timeout=MAX_WAIT_TIME_AIRPLANEMODE_EVENT,
                 field=ServiceStateContainer.SERVICE_STATE,
-                value_list=service_state_list)
+                value_list=service_state_list,
+            )
             ad.log.info("Got event %s", event)
         except Empty:
-            ad.log.warning("Did not get expected service state change to %s",
-                           service_state_list)
+            ad.log.warning(
+                "Did not get expected service state change to %s", service_state_list
+            )
         finally:
             for sub_id in sub_id_list:
-                ad.droid.telephonyStopTrackingServiceStateChangeForSubscription(
-                    sub_id)
+                ad.droid.telephonyStopTrackingServiceStateChangeForSubscription(sub_id)
     except Exception as e:
         ad.log.error(e)
 
     # APM on (new_state=True) will turn off bluetooth but may not turn it on
     try:
         if new_state and not _wait_for_bluetooth_in_state(
-                log, ad, False, timeout_time - time.time()):
-            ad.log.error(
-                "Failed waiting for bluetooth during airplane mode toggle")
-            if strict_checking: return False
+            log, ad, False, timeout_time - time.time()
+        ):
+            ad.log.error("Failed waiting for bluetooth during airplane mode toggle")
+            if strict_checking:
+                return False
     except Exception as e:
         ad.log.error("Failed to check bluetooth state due to %s", e)
         if strict_checking:
             raise
 
     # APM on (new_state=True) will turn off wifi but may not turn it on
-    if new_state and not _wait_for_wifi_in_state(log, ad, False,
-                                                 timeout_time - time.time()):
+    if new_state and not _wait_for_wifi_in_state(
+        log, ad, False, timeout_time - time.time()
+    ):
         ad.log.error("Failed waiting for wifi during airplane mode toggle on")
-        if strict_checking: return False
+        if strict_checking:
+            return False
 
     if ad.droid.connectivityCheckAirplaneMode() != new_state:
         ad.log.error("Set airplane mode to %s failed", new_state)
@@ -516,11 +530,12 @@
 
 
 def wait_for_call_offhook_event(
-        log,
-        ad,
-        sub_id,
-        event_tracking_started=False,
-        timeout=MAX_WAIT_TIME_ACCEPT_CALL_TO_OFFHOOK_EVENT):
+    log,
+    ad,
+    sub_id,
+    event_tracking_started=False,
+    timeout=MAX_WAIT_TIME_ACCEPT_CALL_TO_OFFHOOK_EVENT,
+):
     """Wait for an incoming call on specified subscription.
 
     Args:
@@ -542,25 +557,26 @@
             is_event_match,
             timeout=timeout,
             field=CallStateContainer.CALL_STATE,
-            value=TELEPHONY_STATE_OFFHOOK)
+            value=TELEPHONY_STATE_OFFHOOK,
+        )
         ad.log.info("Got event %s", TELEPHONY_STATE_OFFHOOK)
     except Empty:
         ad.log.info("No event for call state change to OFFHOOK")
         return False
     finally:
         if not event_tracking_started:
-            ad.droid.telephonyStopTrackingCallStateChangeForSubscription(
-                sub_id)
+            ad.droid.telephonyStopTrackingCallStateChangeForSubscription(sub_id)
     return True
 
 
 def wait_for_call_offhook_for_subscription(
-        log,
-        ad,
-        sub_id,
-        event_tracking_started=False,
-        timeout=MAX_WAIT_TIME_ACCEPT_CALL_TO_OFFHOOK_EVENT,
-        interval=WAIT_TIME_BETWEEN_STATE_CHECK):
+    log,
+    ad,
+    sub_id,
+    event_tracking_started=False,
+    timeout=MAX_WAIT_TIME_ACCEPT_CALL_TO_OFFHOOK_EVENT,
+    interval=WAIT_TIME_BETWEEN_STATE_CHECK,
+):
     """Wait for an incoming call on specified subscription.
 
     Args:
@@ -582,32 +598,32 @@
     try:
         while time.time() < end_time:
             if not offhook_event_received:
-                if wait_for_call_offhook_event(log, ad, sub_id, True,
-                                               interval):
+                if wait_for_call_offhook_event(log, ad, sub_id, True, interval):
                     offhook_event_received = True
-            telephony_state = ad.droid.telephonyGetCallStateForSubscription(
-                sub_id)
+            telephony_state = ad.droid.telephonyGetCallStateForSubscription(sub_id)
             telecom_state = ad.droid.telecomGetCallState()
             if telephony_state == TELEPHONY_STATE_OFFHOOK and (
-                    telecom_state == TELEPHONY_STATE_OFFHOOK):
+                telecom_state == TELEPHONY_STATE_OFFHOOK
+            ):
                 ad.log.info("telephony and telecom are in OFFHOOK state")
                 return True
             else:
                 ad.log.info(
                     "telephony in %s, telecom in %s, expecting OFFHOOK state",
-                    telephony_state, telecom_state)
+                    telephony_state,
+                    telecom_state,
+                )
             if offhook_event_received:
                 time.sleep(interval)
     finally:
         if not event_tracking_started:
-            ad.droid.telephonyStopTrackingCallStateChangeForSubscription(
-                sub_id)
+            ad.droid.telephonyStopTrackingCallStateChangeForSubscription(sub_id)
 
 
 def _wait_for_bluetooth_in_state(log, ad, state, max_wait):
     # FIXME: These event names should be defined in a common location
-    _BLUETOOTH_STATE_ON_EVENT = 'BluetoothStateChangedOn'
-    _BLUETOOTH_STATE_OFF_EVENT = 'BluetoothStateChangedOff'
+    _BLUETOOTH_STATE_ON_EVENT = "BluetoothStateChangedOn"
+    _BLUETOOTH_STATE_OFF_EVENT = "BluetoothStateChangedOff"
     ad.ed.clear_events(_BLUETOOTH_STATE_ON_EVENT)
     ad.ed.clear_events(_BLUETOOTH_STATE_OFF_EVENT)
 
@@ -617,20 +633,21 @@
         if bt_state == state:
             return True
         if max_wait <= 0:
-            ad.log.error("Time out: bluetooth state still %s, expecting %s",
-                         bt_state, state)
+            ad.log.error(
+                "Time out: bluetooth state still %s, expecting %s", bt_state, state
+            )
             return False
 
-        event = {
-            False: _BLUETOOTH_STATE_OFF_EVENT,
-            True: _BLUETOOTH_STATE_ON_EVENT
-        }[state]
+        event = {False: _BLUETOOTH_STATE_OFF_EVENT, True: _BLUETOOTH_STATE_ON_EVENT}[
+            state
+        ]
         event = ad.ed.pop_event(event, max_wait)
-        ad.log.info("Got event %s", event['name'])
+        ad.log.info("Got event %s", event["name"])
         return True
     except Empty:
-        ad.log.error("Time out: bluetooth state still in %s, expecting %s",
-                     bt_state, state)
+        ad.log.error(
+            "Time out: bluetooth state still in %s, expecting %s", bt_state, state
+        )
         return False
     finally:
         ad.droid.bluetoothStopListeningForAdapterStateChange()
@@ -651,8 +668,7 @@
     return _wait_for_droid_in_state(log, ad, max_time, is_phone_in_call)
 
 
-def _wait_for_droid_in_state(log, ad, max_time, state_check_func, *args,
-                             **kwargs):
+def _wait_for_droid_in_state(log, ad, max_time, state_check_func, *args, **kwargs):
     while max_time >= 0:
         if state_check_func(log, ad, *args, **kwargs):
             return True
@@ -665,7 +681,10 @@
 
 # TODO: replace this with an event-based function
 def _wait_for_wifi_in_state(log, ad, state, max_wait):
-    return _wait_for_droid_in_state(log, ad, max_wait,
-        lambda log, ad, state: \
-                (True if ad.droid.wifiCheckState() == state else False),
-                state)
+    return _wait_for_droid_in_state(
+        log,
+        ad,
+        max_wait,
+        lambda log, ad, state: ad.droid.wifiCheckState() == state,
+        state,
+    )
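
Reviewer note, not part of the patch: the hunks above are a formatting-only pass, so the generic polling helper _wait_for_droid_in_state keeps its original behavior. For context, a minimal, self-contained sketch of that polling pattern; the names and the deadline-based loop below are illustrative only, not the antlion API:

    import time


    def wait_for_state(check_func, max_time, interval=1.0):
        """Poll check_func() until it returns True or max_time seconds elapse."""
        deadline = time.time() + max_time
        while time.time() <= deadline:
            if check_func():
                return True
            time.sleep(interval)
        return False


    # Example: returns True once the condition flips after roughly 2 seconds.
    start = time.time()
    print(wait_for_state(lambda: time.time() - start > 2, max_time=10))
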
diff --git a/src/antlion/controllers/anritsu_lib/OWNERS b/src/antlion/controllers/anritsu_lib/OWNERS
deleted file mode 100644
index e4010df..0000000
--- a/src/antlion/controllers/anritsu_lib/OWNERS
+++ /dev/null
@@ -1,4 +0,0 @@
-iguarna@google.com
-chaoyangf@google.com
-yixiang@google.com
-codycaldwell@google.com
\ No newline at end of file
diff --git a/src/antlion/controllers/anritsu_lib/__init__.py b/src/antlion/controllers/anritsu_lib/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/controllers/anritsu_lib/__init__.py
+++ /dev/null
diff --git a/src/antlion/controllers/anritsu_lib/_anritsu_utils.py b/src/antlion/controllers/anritsu_lib/_anritsu_utils.py
deleted file mode 100644
index ea5736f..0000000
--- a/src/antlion/controllers/anritsu_lib/_anritsu_utils.py
+++ /dev/null
@@ -1,233 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Utility functions for for Anritsu Signalling Tester.
-"""
-# yapf: disable
-
-OPERATION_COMPLETE = 1
-NO_ERROR = 0
-
-ANRITSU_ERROR_CODES = {
-    0: 'No errors occurred',
-    2: 'The specified file does not exist',
-    14: 'The buffer size is insufficient',
-    29: 'The save destination is a write-protected file.',
-    80: 'A file with the same name already exists.'
-        ' (If Overwrite is specified to 0.)',
-    87: 'The specified value is wrong.',
-    112: 'The disk space is insufficient.',
-    183: 'SmartStudio is already running.',
-    1060: 'The control software has not been started or has already terminated',
-    1067: 'SmartStudio, control software or SMS Centre could not start due to'
-          'a problem or problems resulting from OS or the MD8475A system.',
-    1229: 'Connecting to the server failed.',
-    1235: 'A request is suspended.',
-    1460: 'The operation is terminated due to the expiration of the'
-          ' timeout period.',
-    9999: 'A GPIB command error occurred.',
-    536870912: 'The license could not be confirmed.',
-    536870913: 'The specified file cannot be loaded by the SmartStudio.',
-    536870914: 'The specified process ID does not exist.',
-    536870915: 'The received data does not exist.',
-    536870916: 'Simulation is not running.',
-    536870917: 'Simulation is running.',
-    536870918: 'Test Case has never been executed.',
-    536870919: 'The resource cannot be obtained.',
-    536870920: 'A resource protocol error, such as download error or'
-               ' license error, occurred.',
-    536870921: 'The function call has been in invalid status.',
-    536870922: 'The current Simulation Model does not allow the operation.',
-    536870923: 'The Cell name to be set does not exist.',
-    536870924: 'The test is being executed.',
-    536870925: 'The current UE status does not correspond to the'
-               ' test parameters.',
-    536870926: 'There is no LOG information because the simulation'
-               ' has not been executed.',
-    536870927: 'Measure Export has already been executed.',
-    536870928: 'SmartStudio is not connected to the SMS Centre.',
-    536870929: 'SmartStudio failed to send an SMS message to the SMS Centre.',
-    536870930: 'SmartStudio has successfully sent an SMS message'
-               ' to the SMS Centre,but the SMS Centre judges it as an error.',
-    536870931: 'The processing that is unavailable with the current system'
-               ' status has been executed.',
-    536870932: 'The option could not be confirmed.',
-    536870933: 'Measure Export has been stopped.',
-    536870934: 'SmartStudio cannot load the specified file because the'
-               ' version is old.',
-    536870935: 'The data with the specified PDN number does not exist.',
-    536870936: 'The data with the specified Dedicated number does not exist.',
-    536870937: 'The PDN data cannot be added because the upper limit of the'
-               ' number of PDN data has been reached.',
-    536870938: 'The number of antennas, which cannot be set to the current'
-               ' Simulation Model,has been specified.',
-    536870939: 'Calibration of path loss failed.',
-    536870940: 'There is a parameter conflict.',
-    536870941: 'The DL Ref Power setting is out of the setting range'
-               ' at W-CDMA (Evolution).',
-    536870942: 'DC-HSDPA is not available for the current channel setting.',
-    536870943: 'The specified Packet Rate cannot be used by the current'
-               ' Simulation Model.',
-    536870944: 'The W-CDMA Cell parameter F-DPCH is set to Enable.',
-    536870945: 'Target is invalid.',
-    536870946: 'The PWS Centre detects an error.',
-    536870947: 'The Ec/Ior setting is invalid.',
-    536870948: 'The combination of Attach Type and TA Update Type is invalid.',
-    536870949: 'The license of the option has expired.',
-    536870950: 'The Ping command is being executed.',
-    536870951: 'The Ping command is not being executed.',
-    536870952: 'The current Test Case parameter setting is wrong.',
-    536870953: 'The specified IP address is the same as that of Default Gateway'
-               'specified by Simulation parameter.',
-    536870954: 'TFT IE conversion failed.',
-    536870955: 'Saving settings to the SmartStudio scenario failed.',
-    536875008: 'An error exists in the parameter configuration.'
-               '(This error applies only to the current version.)',
-    536936448: 'License verification failed.',
-    536936449: 'The IMS Services cannot load the specified file.',
-    536936462: 'Simulation is not performed and no log information exists.',
-    536936467: 'The executed process is inoperable in the current status'
-               ' of Visual User Agent.',
-    536936707: 'The specified Virtual Network is not running.',
-    536936709: 'The specified Virtual Network is running. '
-               'Any one of the Virtual Networks is running.',
-    536936727: 'The specified Virtual Network does not exist.',
-    536936729: 'When the Virtual Network already exists.',
-    554762241: 'The RF Measurement launcher cannot be accessed.',
-    554762242: 'License check of the RF Measurement failed.',
-    554762243: 'Function is called when RF Measurement cannot be set.',
-    554762244: 'RF Measurement has been already started.',
-    554762245: 'RF Measurement failed to start due to a problem resulting'
-               ' from OS or the MD8475A system.',
-    554762246: 'RF Measurement is not started or is already terminated.',
-    554762247: 'There is a version mismatch between RF Measurement and CAL.',
-    554827777: 'The specified value for RF Measurement is abnormal.',
-    554827778: 'GPIB command error has occurred in RF Measurement.',
-    554827779: 'Invalid file path was specified to RF Measurement.',
-    554827780: 'RF Measurement argument is NULL pointer.',
-    555810817: 'RF Measurement is now performing the measurement.',
-    555810818: 'RF Measurement is now not performing the measurement.',
-    555810819: 'RF Measurement is not measured yet. (There is no result '
-               'information since measurement is not performed.)',
-    555810820: 'An error has occurred when RF Measurement'
-               ' starts the measurement.',
-    555810821: 'Simulation has stopped when RF Measurement is '
-               'performing the measurement.',
-    555810822: 'An error has been retrieved from the Platform when '
-               'RF Measurement is performing the measurement.',
-    555810823: 'Measurement has been started in the system state where RF '
-               'Measurement is invalid.',
-    556859393: 'RF Measurement is now saving a file.',
-    556859394: 'There is insufficient disk space when saving'
-               'a Measure Result file of RF Measurement.',
-    556859395: 'An internal error has occurred or USB cable has been'
-               ' disconnected when saving a Measure Result'
-               ' file of RF Measurement.',
-    556859396: 'A write-protected file was specified as the save destination'
-               ' when saving a Measure Result file of RF Measurement.',
-    568328193: 'An internal error has occurred in RF Measurement.',
-    687865857: 'Calibration Measure DSP is now being measured.',
-    687865858: 'Calibration measurement failed.',
-    687865859: 'Calibration slot is empty or its system does not apply.',
-    687865860: 'Unexpected command is received from Calibration HWC.',
-    687865861: 'Failed to receive the Calibration measurement result.',
-    687865862: 'Failed to open the correction value file on the'
-               ' Calibration HDD.',
-    687865863: 'Failed to move the pointer on the Calibration correction'
-               ' value table.',
-    687865864: 'Failed to write the correction value to the Calibration'
-               ' correction value file on the Calibration HDD.',
-    687865865: 'Failed to load the correction value from the Calibration HDD.',
-    687865866: 'Failed to create a directory to which the correction value '
-               'file on the Calibration HDD is saved.',
-    687865867: 'Correction data has not been written in the'
-               ' Calibration-specified correction table.',
-    687865868: 'Data received from Calibration HWC does not exist.',
-    687865869: 'Data has not been written to the Flash ROM'
-               ' of Calibration BASE UNIT.',
-    687865870: 'Correction data has not been written to the'
-               ' Calibration-specified sector.',
-    687866111: 'An calibration error other than described above occurred.',
-}
-
-
-def _error_code_tostring(error_code):
-    ''' returns the description of the error from the error code
-    returned by anritsu MD8475A '''
-    try:
-        error_string = ANRITSU_ERROR_CODES[error_code]
-    except KeyError:
-        error_string = "Error : {} ".format(error_code)
-
-    return error_string
-
-
-class AnritsuUtils(object):
-    def gsm_encode(text):
-        '''To encode text string with GSM 7-bit alphabet for common symbols'''
-        table = {' ': '%20', '!': '%21', '\"': '%22', '#': '%23', '$': '%24',
-                 '/': '%2F', '%': '%25', '&': '%26', '\'': '%27', '(': '%28',
-                 ')': '%29', '*': '%2A', '+': '%2B', ',': '%2C', ':': '%3A',
-                 ';': '%3B', '<': '%3C', '=': '%3D', '>': '%3E', '?': '%3F',
-                 '@': '%40', '[': '%5B', ']': '%5D', '_': '%5F', 'é': '%C3%A9'}
-        coded_str = ""
-        for char in text:
-            if char in table:
-                coded_str += table[char]
-            else:
-                coded_str += char
-        return coded_str
-
-    def gsm_decode(text):
-        '''To decode text string with GSM 7-bit alphabet for common symbols'''
-        table = {'%20': ' ', '%21': '!', '%22': '\"', '%23': '#', '%24': '$',
-                 '%2F': '/', '%25': '%', '%26': '&', '%27': '\'', '%28': '(',
-                 '%29': ')', '%2A': '*', '%2B': '+', '%2C': ',', '%3A': ':',
-                 '%3B': ';', '%3C': '<', '%3D': '=', '%3E': '>', '%3F': '?',
-                 '%40': '@', '%5B': '[', '%5D': ']', '%5F': '_', '%C3%A9': 'é'}
-        coded_str = text
-        for char in table:
-            if char in text:
-                coded_str = coded_str.replace(char, table[char])
-        return coded_str
-
-    def cdma_encode(text):
-        '''To encode text string with GSM 7-bit alphabet for common symbols'''
-        table = {' ': '%20', '!': '%21', '\"': '%22', '#': '%23', '$': '%24',
-                 '/': '%2F', '%': '%25', '&': '%26', '\'': '%27', '(': '%28',
-                 ')': '%29', '*': '%2A', '+': '%2B', ',': '%2C', ':': '%3A',
-                 ';': '%3B', '<': '%3C', '=': '%3D', '>': '%3E', '?': '%3F',
-                 '@': '%40', '[': '%5B', ']': '%5D', '_': '%5F'}
-        coded_str = ""
-        for char in text:
-            if char in table:
-                coded_str += table[char]
-            else:
-                coded_str += char
-        return coded_str
-
-class AnritsuError(Exception):
-    '''Exception for errors related to Anritsu.'''
-    def __init__(self, error, command=None):
-        self._error_code = error
-        self._error_message = _error_code_tostring(self._error_code)
-        if command is not None:
-            self._error_message = "Command {} returned the error: '{}'".format(
-                                  command, self._error_message)
-
-    def __str__(self):
-        return self._error_message
-# yapf: enable
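
Reviewer note, not part of the patch: the deleted AnritsuUtils helpers above percent-encode a handful of common symbols in message text for the MD8475A. A tiny standalone illustration of that table-driven round trip, kept only as context for what is being removed; the table below is deliberately truncated:

    TABLE = {" ": "%20", "!": "%21", "?": "%3F", "@": "%40"}


    def encode(text):
        # Replace each known symbol with its percent-encoded form.
        return "".join(TABLE.get(char, char) for char in text)


    def decode(text):
        # Reverse the substitution for every entry in the table.
        for char, coded in TABLE.items():
            text = text.replace(coded, char)
        return text


    assert decode(encode("Hello @ home?")) == "Hello @ home?"
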
diff --git a/src/antlion/controllers/anritsu_lib/band_constants.py b/src/antlion/controllers/anritsu_lib/band_constants.py
deleted file mode 100644
index 18dd5bc..0000000
--- a/src/antlion/controllers/anritsu_lib/band_constants.py
+++ /dev/null
@@ -1,31 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# GSM BAND constants
-GSM_BAND_GSM450 = "GSM450"
-GSM_BAND_GSM480 = "GSM480"
-GSM_BAND_GSM850 = "GSM850"
-GSM_BAND_PGSM900 = "P-GSM900"
-GSM_BAND_EGSM900 = "E-GSM900"
-GSM_BAND_RGSM900 = "R-GSM900"
-GSM_BAND_DCS1800 = "DCS1800"
-GSM_BAND_PCS1900 = "PCS1900"
-
-LTE_BAND_2 = 2
-LTE_BAND_4 = 4
-LTE_BAND_12 = 12
-WCDMA_BAND_1 = 1
-WCDMA_BAND_2 = 2
diff --git a/src/antlion/controllers/anritsu_lib/cell_configurations.py b/src/antlion/controllers/anritsu_lib/cell_configurations.py
deleted file mode 100644
index 83773e0..0000000
--- a/src/antlion/controllers/anritsu_lib/cell_configurations.py
+++ /dev/null
@@ -1,327 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Sanity tests for voice tests in telephony
-"""
-from antlion.controllers.anritsu_lib.band_constants import GSM_BAND_PCS1900
-from antlion.controllers.anritsu_lib.band_constants import GSM_BAND_GSM850
-from antlion.controllers.anritsu_lib.band_constants import LTE_BAND_2
-from antlion.controllers.anritsu_lib.band_constants import LTE_BAND_4
-from antlion.controllers.anritsu_lib.band_constants import LTE_BAND_12
-from antlion.controllers.anritsu_lib.band_constants import WCDMA_BAND_1
-from antlion.controllers.anritsu_lib.band_constants import WCDMA_BAND_2
-from antlion.controllers.anritsu_lib.md8475a import BtsBandwidth
-
-# Different Cell configurations
-# TMO bands
-lte_band4_ch2000_fr2115_pcid1_cell = {
-    'band': LTE_BAND_4,
-    'bandwidth': BtsBandwidth.LTE_BANDWIDTH_10MHz,
-    'mcc': '001',
-    'mnc': '01',
-    'tac': 11,
-    'cid': 1,
-    'pcid': 1,
-    'channel': 2000
-}
-
-lte_band4_ch2000_fr2115_pcid2_cell = {
-    'band': LTE_BAND_4,
-    'bandwidth': BtsBandwidth.LTE_BANDWIDTH_10MHz,
-    'mcc': '001',
-    'mnc': '01',
-    'tac': 12,
-    'cid': 2,
-    'pcid': 2,
-    'channel': 2000
-}
-
-lte_band4_ch2000_fr2115_pcid3_cell = {
-    'band': LTE_BAND_4,
-    'bandwidth': BtsBandwidth.LTE_BANDWIDTH_10MHz,
-    'mcc': '001',
-    'mnc': '01',
-    'tac': 13,
-    'cid': 3,
-    'pcid': 3,
-    'channel': 2000
-}
-
-lte_band4_ch2000_fr2115_pcid4_cell = {
-    'band': LTE_BAND_4,
-    'bandwidth': BtsBandwidth.LTE_BANDWIDTH_10MHz,
-    'mcc': '001',
-    'mnc': '01',
-    'tac': 14,
-    'cid': 4,
-    'pcid': 4,
-    'channel': 2000
-}
-
-lte_band4_ch2000_fr2115_pcid5_cell = {
-    'band': LTE_BAND_4,
-    'bandwidth': BtsBandwidth.LTE_BANDWIDTH_10MHz,
-    'mcc': '001',
-    'mnc': '01',
-    'tac': 15,
-    'cid': 5,
-    'pcid': 5,
-    'channel': 2000
-}
-
-lte_band4_ch2000_fr2115_pcid6_cell = {
-    'band': LTE_BAND_4,
-    'bandwidth': BtsBandwidth.LTE_BANDWIDTH_10MHz,
-    'mcc': '001',
-    'mnc': '01',
-    'tac': 16,
-    'cid': 6,
-    'pcid': 6,
-    'channel': 2000
-}
-
-lte_band4_ch2050_fr2120_pcid7_cell = {
-    'band': LTE_BAND_4,
-    'bandwidth': BtsBandwidth.LTE_BANDWIDTH_10MHz,
-    'mcc': '001',
-    'mnc': '01',
-    'tac': 17,
-    'cid': 7,
-    'pcid': 7,
-    'channel': 2050
-}
-
-lte_band4_ch2250_fr2140_pcid8_cell = {
-    'band': LTE_BAND_4,
-    'bandwidth': BtsBandwidth.LTE_BANDWIDTH_10MHz,
-    'mcc': '001',
-    'mnc': '01',
-    'tac': 18,
-    'cid': 8,
-    'pcid': 8,
-    'channel': 2250
-}
-
-lte_band2_ch900_fr1960_pcid9_cell = {
-    'band': LTE_BAND_2,
-    'bandwidth': BtsBandwidth.LTE_BANDWIDTH_10MHz,
-    'mcc': '001',
-    'mnc': '01',
-    'tac': 19,
-    'cid': 9,
-    'pcid': 9,
-    'channel': 900
-}
-
-lte_band12_ch5095_fr737_pcid10_cell = {
-    'band': LTE_BAND_12,
-    'bandwidth': BtsBandwidth.LTE_BANDWIDTH_10MHz,
-    'mcc': '001',
-    'mnc': '01',
-    'tac': 20,
-    'cid': 10,
-    'pcid': 10,
-    'channel': 5095
-}
-
-wcdma_band1_ch10700_fr2140_cid31_cell = {
-    'band': WCDMA_BAND_1,
-    'mcc': '001',
-    'mnc': '01',
-    'lac': 31,
-    'rac': 31,
-    'cid': 31,
-    'channel': 10700,
-    'psc': 31
-}
-
-wcdma_band1_ch10700_fr2140_cid32_cell = {
-    'band': WCDMA_BAND_1,
-    'mcc': '001',
-    'mnc': '01',
-    'lac': 32,
-    'rac': 32,
-    'cid': 32,
-    'channel': 10700,
-    'psc': 32
-}
-
-wcdma_band1_ch10700_fr2140_cid33_cell = {
-    'band': WCDMA_BAND_1,
-    'mcc': '001',
-    'mnc': '01',
-    'lac': 33,
-    'rac': 33,
-    'cid': 33,
-    'channel': 10700,
-    'psc': 33
-}
-
-wcdma_band1_ch10700_fr2140_cid34_cell = {
-    'band': WCDMA_BAND_1,
-    'mcc': '001',
-    'mnc': '01',
-    'lac': 34,
-    'rac': 34,
-    'cid': 34,
-    'channel': 10700,
-    'psc': 34
-}
-
-wcdma_band1_ch10700_fr2140_cid35_cell = {
-    'band': WCDMA_BAND_1,
-    'mcc': '001',
-    'mnc': '01',
-    'lac': 35,
-    'rac': 35,
-    'cid': 35,
-    'channel': 10700,
-    'psc': 35
-}
-
-wcdma_band1_ch10575_fr2115_cid36_cell = {
-    'band': WCDMA_BAND_1,
-    'mcc': '001',
-    'mnc': '01',
-    'lac': 36,
-    'rac': 36,
-    'cid': 36,
-    'channel': 10575,
-    'psc': 36
-}
-
-wcdma_band1_ch10800_fr2160_cid37_cell = {
-    'band': WCDMA_BAND_1,
-    'mcc': '001',
-    'mnc': '01',
-    'lac': 37,
-    'rac': 37,
-    'cid': 37,
-    'channel': 10800,
-    'psc': 37
-}
-
-wcdma_band2_ch9800_fr1960_cid38_cell = {
-    'band': WCDMA_BAND_2,
-    'mcc': '001',
-    'mnc': '01',
-    'lac': 38,
-    'rac': 38,
-    'cid': 38,
-    'channel': 9800,
-    'psc': 38
-}
-
-wcdma_band2_ch9900_fr1980_cid39_cell = {
-    'band': WCDMA_BAND_2,
-    'mcc': '001',
-    'mnc': '01',
-    'lac': 39,
-    'rac': 39,
-    'cid': 39,
-    'channel': 9900,
-    'psc': 39
-}
-
-gsm_band1900_ch512_fr1930_cid51_cell = {
-    'band': GSM_BAND_PCS1900,
-    'mcc': '001',
-    'mnc': '01',
-    'lac': 51,
-    'rac': 51,
-    'cid': 51,
-    'channel': 512,
-}
-
-gsm_band1900_ch512_fr1930_cid52_cell = {
-    'band': GSM_BAND_PCS1900,
-    'mcc': '001',
-    'mnc': '01',
-    'lac': 52,
-    'rac': 52,
-    'cid': 52,
-    'channel': 512,
-}
-
-gsm_band1900_ch512_fr1930_cid53_cell = {
-    'band': GSM_BAND_PCS1900,
-    'mcc': '001',
-    'mnc': '01',
-    'lac': 53,
-    'rac': 53,
-    'cid': 53,
-    'channel': 512,
-}
-
-gsm_band1900_ch512_fr1930_cid54_cell = {
-    'band': GSM_BAND_PCS1900,
-    'mcc': '001',
-    'mnc': '01',
-    'lac': 54,
-    'rac': 54,
-    'cid': 54,
-    'channel': 512,
-}
-
-gsm_band1900_ch512_fr1930_cid55_cell = {
-    'band': GSM_BAND_PCS1900,
-    'mcc': '001',
-    'mnc': '01',
-    'lac': 55,
-    'rac': 55,
-    'cid': 55,
-    'channel': 512,
-}
-
-gsm_band1900_ch640_fr1955_cid56_cell = {
-    'band': GSM_BAND_PCS1900,
-    'mcc': '001',
-    'mnc': '01',
-    'lac': 56,
-    'rac': 56,
-    'cid': 56,
-    'channel': 640,
-}
-
-gsm_band1900_ch750_fr1977_cid57_cell = {
-    'band': GSM_BAND_PCS1900,
-    'mcc': '001',
-    'mnc': '01',
-    'lac': 57,
-    'rac': 57,
-    'cid': 57,
-    'channel': 750,
-}
-
-gsm_band850_ch128_fr869_cid58_cell = {
-    'band': GSM_BAND_GSM850,
-    'mcc': '001',
-    'mnc': '01',
-    'lac': 58,
-    'rac': 58,
-    'cid': 58,
-    'channel': 128,
-}
-
-gsm_band850_ch251_fr893_cid59_cell = {
-    'band': GSM_BAND_GSM850,
-    'mcc': '001',
-    'mnc': '01',
-    'lac': 59,
-    'rac': 59,
-    'cid': 59,
-    'channel': 251,
-}
diff --git a/src/antlion/controllers/anritsu_lib/md8475_cellular_simulator.py b/src/antlion/controllers/anritsu_lib/md8475_cellular_simulator.py
deleted file mode 100644
index 55a89e9..0000000
--- a/src/antlion/controllers/anritsu_lib/md8475_cellular_simulator.py
+++ /dev/null
@@ -1,732 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import math
-import ntpath
-import time
-import antlion.controllers.cellular_simulator as cc
-from antlion.controllers.cellular_lib import LteSimulation
-from antlion.controllers.anritsu_lib import md8475a
-from antlion.controllers.anritsu_lib import _anritsu_utils as anritsu
-
-
-class MD8475CellularSimulator(cc.AbstractCellularSimulator):
-
-    MD8475_VERSION = 'A'
-
-    # Indicates if it is able to use 256 QAM as the downlink modulation for LTE
-    LTE_SUPPORTS_DL_256QAM = False
-
-    # Indicates if it is able to use 64 QAM as the uplink modulation for LTE
-    LTE_SUPPORTS_UL_64QAM = False
-
-    # Indicates if 4x4 MIMO is supported for LTE
-    LTE_SUPPORTS_4X4_MIMO = False
-
-    # The maximum number of carriers that this simulator can support for LTE
-    LTE_MAX_CARRIERS = 2
-
-    # The maximum power that the equipment is able to transmit
-    MAX_DL_POWER = -10
-
-    # Simulation config files in the callbox computer.
-    # These should be replaced in the future by setting up
-    # the same configuration manually.
-    LTE_BASIC_SIM_FILE = 'SIM_default_LTE.wnssp'
-    LTE_BASIC_CELL_FILE = 'CELL_LTE_config.wnscp'
-    LTE_CA_BASIC_SIM_FILE = 'SIM_LTE_CA.wnssp'
-    LTE_CA_BASIC_CELL_FILE = 'CELL_LTE_CA_config.wnscp'
-
-    # Filepath to the config files stored in the Anritsu callbox. Needs to be
-    # formatted to replace {} with either A or B depending on the model.
-    CALLBOX_CONFIG_PATH = 'C:\\Users\\MD8475A\\Documents\\DAN_configs\\'
-
-    def __init__(self, ip_address):
-        """ Initializes the cellular simulator.
-
-        Args:
-            ip_address: the ip address of the MD8475 instrument
-        """
-        super().__init__()
-
-        try:
-            self.anritsu = md8475a.MD8475A(ip_address,
-                                           md8475_version=self.MD8475_VERSION)
-        except anritsu.AnritsuError:
-            raise cc.CellularSimulatorError('Could not connect to MD8475.')
-
-        self.bts = None
-
-    def destroy(self):
-        """ Sends finalization commands to the cellular equipment and closes
-        the connection. """
-        self.anritsu.stop_simulation()
-        self.anritsu.disconnect()
-
-    def setup_lte_scenario(self):
-        """ Configures the equipment for an LTE simulation. """
-        cell_file_name = self.LTE_BASIC_CELL_FILE
-        sim_file_name = self.LTE_BASIC_SIM_FILE
-
-        cell_file_path = ntpath.join(self.CALLBOX_CONFIG_PATH, cell_file_name)
-        sim_file_path = ntpath.join(self.CALLBOX_CONFIG_PATH, sim_file_name)
-
-        self.anritsu.load_simulation_paramfile(sim_file_path)
-        self.anritsu.load_cell_paramfile(cell_file_path)
-
-        # MD4875A supports only 2 carriers. The MD4875B class adds other cells.
-        self.bts = [
-            self.anritsu.get_BTS(md8475a.BtsNumber.BTS1),
-            self.anritsu.get_BTS(md8475a.BtsNumber.BTS2)
-        ]
-
-    def set_band_combination(self, bands):
-        """ Prepares the test equipment for the indicated band combination.
-
-        The reason why this is implemented in a separate method and not calling
-        LteSimulation.BtsConfig for each separate band is that configuring each
-        ssc cannot be done separately, as it is necessary to know which
-        carriers are on the same band in order to decide which RF outputs can
-        be shared in the test equipment.
-
-        Args:
-            bands: a list of bands represented as ints or strings
-        """
-        self.num_carriers = len(bands)
-
-        # Validate the number of carriers.
-        if self.num_carriers > self.LTE_MAX_CARRIERS:
-            raise cc.CellularSimulatorError('The test equipment supports up '
-                                            'to {} carriers.'.format(
-                                                self.LTE_MAX_CARRIERS))
-
-        # Initialize the base stations in the test equipment
-        self.anritsu.set_simulation_model(
-            *[md8475a.BtsTechnology.LTE for _ in range(self.num_carriers)],
-            reset=False)
-
-        # If base stations use different bands, make sure that the RF cards are
-        # not being shared by setting the right maximum MIMO modes
-        if self.num_carriers == 2:
-            # RF cards are never shared when doing 2CA so 4X4 can be done in
-            # both base stations.
-            self.bts[0].mimo_support = md8475a.LteMimoMode.MIMO_4X4
-            self.bts[1].mimo_support = md8475a.LteMimoMode.MIMO_4X4
-        elif self.num_carriers == 3:
-            # 4X4 can only be done in the second base station if it is shared
-            # with the primary. If the RF cards cannot be shared, then at most
-            # 2X2 can be done.
-            self.bts[0].mimo_support = md8475a.LteMimoMode.MIMO_4X4
-            if bands[0] == bands[1]:
-                self.bts[1].mimo_support = md8475a.LteMimoMode.MIMO_4X4
-            else:
-                self.bts[1].mimo_support = md8475a.LteMimoMode.MIMO_2X2
-            self.bts[2].mimo_support = md8475a.LteMimoMode.MIMO_2X2
-        elif self.num_carriers > 3:
-            raise NotImplementedError('The controller doesn\'t implement more '
-                                      'than 3 carriers for MD8475B yet.')
-
-        # Enable carrier aggregation if there is more than one carrier
-        if self.num_carriers > 1:
-            self.anritsu.set_carrier_aggregation_enabled()
-
-        # Restart the simulation as changing the simulation model will stop it.
-        self.anritsu.start_simulation()
-
-    def set_input_power(self, bts_index, input_power):
-        """ Sets the input power for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            input_power: the new input power
-        """
-        nrb_ul = int(self.bts[bts_index].nrb_ul)
-        max_nrb_ul = self.bts[bts_index].max_nrb_ul
-        input_level = str(
-            round(input_power - 10 * math.log10(nrb_ul / max_nrb_ul), 1))
-        if nrb_ul < max_nrb_ul:
-            self.log.info('Number of UL RBs ({}) is less than the maximum RB '
-                          'allocation ({}). Increasing UL reference power to '
-                          '{} dbm to compensate'.format(
-                              nrb_ul, max_nrb_ul, input_level))
-        self.bts[bts_index].input_level = input_level
-
-    def set_output_power(self, bts_index, output_power):
-        """ Sets the output power for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            output_power: the new output power
-        """
-        self.bts[bts_index].output_level = output_power
-
-    def set_downlink_channel_number(self, bts_index, channel_number):
-        """ Sets the downlink channel number for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            channel_number: the new channel number
-        """
-        # Temporarily adding this line to workaround a bug in the
-        # Anritsu callbox in which the channel number needs to be set
-        # to a different value before setting it to the final one.
-        self.bts[bts_index].dl_channel = str(int(channel_number + 1))
-        time.sleep(8)
-        self.bts[bts_index].dl_channel = str(int(channel_number))
-
-    def set_dl_256_qam_enabled(self, bts_index, enabled):
-        """ Determines what MCS table should be used for the downlink.
-
-        Args:
-            bts_index: the base station number
-            enabled: whether 256 QAM should be used
-        """
-        if enabled and not self.LTE_SUPPORTS_DL_256QAM:
-            raise RuntimeError('256 QAM is not supported')
-        self.bts[bts_index].lte_dl_modulation_order = \
-            md8475a.ModulationType.Q256 if enabled else md8475a.ModulationType.Q64
-
-    def set_ul_64_qam_enabled(self, bts_index, enabled):
-        """ Determines what MCS table should be used for the uplink.
-
-        Args:
-            bts_index: the base station number
-            enabled: whether 64 QAM should be used
-        """
-        self.bts[bts_index].lte_ul_modulation_order = \
-            md8475a.ModulationType.Q64 if enabled else md8475a.ModulationType.Q16
-
-    def set_mac_padding(self, bts_index, mac_padding):
-        """ Enables or disables MAC padding in the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            mac_padding: the new MAC padding setting
-        """
-        if mac_padding:
-            self.bts[bts_index].tbs_pattern = 'FULLALLOCATION'
-        else:
-            self.bts[bts_index].tbs_pattern = 'OFF'
-
-    def set_lte_rrc_state_change_timer(self, enabled, time=10):
-        """ Configures the LTE RRC state change timer.
-
-        Args:
-            enabled: a boolean indicating if the timer should be on or off.
-            time: time in seconds for the timer to expire
-        """
-        self.anritsu.set_lte_rrc_status_change(enabled)
-        if enabled:
-            self.anritsu.set_lte_rrc_status_change_timer(time)
-
-    def set_cfi(self, bts_index, cfi):
-        """ Sets the Channel Format Indicator for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            cfi: the new CFI setting
-        """
-        self.bts[bts_index].cfi = cfi
-
-    def set_paging_cycle(self, bts_index, cycle_duration):
-        """ Sets the paging cycle duration for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            cycle_duration: the new paging cycle duration in milliseconds
-        """
-        # TODO (b/146068532): implement.
-        self.bts[bts_index].paging_duration = cycle_duration
-
-    def set_phich_resource(self, bts_index, phich):
-        """ Sets the PHICH Resource setting for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            phich: the new PHICH resource setting
-        """
-        self.bts[bts_index].phich_resource = phich
-
-    def set_drx_connected_mode(self, bts_index, active):
-        """ Sets the DRX connected mode
-
-        Args:
-            bts_index: the base station number
-            active: Boolean indicating whether cDRX mode
-                is active
-        """
-        mode = 'MANUAL' if active else 'OFF'
-        self.bts[bts_index].drx_connected_mode = mode
-
-    def set_drx_on_duration_timer(self, bts_index, timer):
-        """ Sets the amount of PDCCH subframes to wait for data after
-            waking up from a DRX cycle
-
-        Args:
-            bts_index: the base station number
-            timer: Number of PDCCH subframes to wait and check for user data
-                after waking from the DRX cycle
-        """
-        self.bts[bts_index].drx_on_duration_timer = timer
-
-    def set_drx_inactivity_timer(self, bts_index, timer):
-        """ Sets the number of PDCCH subframes to wait before entering DRX mode
-
-        Args:
-            bts_index: the base station number
-            timer: The time interval to wait before entering DRX mode
-        """
-        self.bts[bts_index].drx_inactivity_timer = timer
-
-    def set_drx_retransmission_timer(self, bts_index, timer):
-        """ Sets the number of consecutive PDCCH subframes to wait
-        for retransmission
-
-        Args:
-            bts_index: the base station number
-            timer: Number of PDCCH subframes to remain active
-
-        """
-        self.bts[bts_index].drx_retransmission_timer = timer
-
-    def set_drx_long_cycle(self, bts_index, cycle):
-        """ Sets the amount of subframes representing a DRX long cycle.
-
-        Args:
-            bts_index: the base station number
-            cycle: The amount of subframes representing one long DRX cycle.
-                One cycle consists of DRX sleep + DRX on duration
-        """
-        self.bts[bts_index].drx_long_cycle = cycle
-
-    def set_drx_long_cycle_offset(self, bts_index, offset):
-        """ Sets the offset used to determine the subframe number
-        to begin the long drx cycle
-
-        Args:
-            bts_index: the base station number
-            offset: Number in range 0 to (long cycle - 1)
-        """
-        self.bts[bts_index].drx_long_cycle_offset = offset
-
-    def set_band(self, bts_index, band):
-        """ Sets the right duplex mode before switching to a new band.
-
-        Args:
-            bts_index: the base station number
-            band: desired band
-        """
-        bts = self.bts[bts_index]
-
-        # The callbox won't restore the band-dependent default values if the
-        # request is to switch to the same band as the one the base station is
-        # currently using. To ensure that default values are restored, go to a
-        # different band before switching.
-        if int(bts.band) == band:
-            # Using bands 1 and 2 but it could be any others
-            bts.band = '1' if band != 1 else '2'
-            # Switching to config.band will be handled by the parent class
-            # implementation of this method.
-
-        bts.duplex_mode = self.get_duplex_mode(band).value
-        bts.band = band
-        time.sleep(5)  # It takes some time to propagate the new band
-
-    def get_duplex_mode(self, band):
-        """ Determines if the band uses FDD or TDD duplex mode
-
-        Args:
-            band: a band number
-        Returns:
-            an variable of class DuplexMode indicating if band is FDD or TDD
-        """
-
-        if 33 <= int(band) <= 46:
-            return LteSimulation.DuplexMode.TDD
-        else:
-            return LteSimulation.DuplexMode.FDD
-
-    def set_tdd_config(self, bts_index, config):
-        """ Sets the frame structure for TDD bands.
-
-        Args:
-            bts_index: the base station number
-            config: the desired frame structure. An int between 0 and 6.
-        """
-
-        if not 0 <= config <= 6:
-            raise ValueError("The frame structure configuration has to be a "
-                             "number between 0 and 6")
-
-        self.bts[bts_index].uldl_configuration = config
-
-        # Wait for the setting to propagate
-        time.sleep(5)
-
-    def set_ssf_config(self, bts_index, ssf_config):
-        """ Sets the Special Sub-Frame config number for the indicated
-        base station.
-
-        Args:
-            bts_index: the base station number
-            ssf_config: the new ssf config number
-        """
-        # Cast to int in case it was passed as a string
-        ssf_config = int(ssf_config)
-
-        if not 0 <= ssf_config <= 9:
-            raise ValueError('The Special Sub-Frame configuration has to be a '
-                             'number between 0 and 9.')
-
-        self.bts[bts_index].tdd_special_subframe = ssf_config
-
-    def set_bandwidth(self, bts_index, bandwidth):
-        """ Sets the LTE channel bandwidth (MHz)
-
-        Args:
-            bts_index: the base station number
-            bandwidth: desired bandwidth (MHz)
-        """
-        bts = self.bts[bts_index]
-
-        if bandwidth == 20:
-            bts.bandwidth = md8475a.BtsBandwidth.LTE_BANDWIDTH_20MHz
-        elif bandwidth == 15:
-            bts.bandwidth = md8475a.BtsBandwidth.LTE_BANDWIDTH_15MHz
-        elif bandwidth == 10:
-            bts.bandwidth = md8475a.BtsBandwidth.LTE_BANDWIDTH_10MHz
-        elif bandwidth == 5:
-            bts.bandwidth = md8475a.BtsBandwidth.LTE_BANDWIDTH_5MHz
-        elif bandwidth == 3:
-            bts.bandwidth = md8475a.BtsBandwidth.LTE_BANDWIDTH_3MHz
-        elif bandwidth == 1.4:
-            bts.bandwidth = md8475a.BtsBandwidth.LTE_BANDWIDTH_1dot4MHz
-        else:
-            msg = "Bandwidth = {} MHz is not valid for LTE".format(bandwidth)
-            self.log.error(msg)
-            raise ValueError(msg)
-        time.sleep(5)  # It takes some time to propagate the new settings
-
-    def set_mimo_mode(self, bts_index, mimo):
-        """ Sets the number of DL antennas for the desired MIMO mode.
-
-        Args:
-            bts_index: the base station number
-            mimo: object of class MimoMode
-        """
-
-        bts = self.bts[bts_index]
-
-        # If the requested mimo mode is not compatible with the current TM,
-        # warn the user before changing the value.
-
-        if mimo == LteSimulation.MimoMode.MIMO_1x1:
-            if bts.transmode not in [
-                    LteSimulation.TransmissionMode.TM1,
-                    LteSimulation.TransmissionMode.TM7
-            ]:
-                self.log.warning(
-                    "Using only 1 DL antennas is not allowed with "
-                    "the current transmission mode. Changing the "
-                    "number of DL antennas will override this "
-                    "setting.")
-            bts.dl_antenna = 1
-        elif mimo == LteSimulation.MimoMode.MIMO_2x2:
-            if bts.transmode not in [
-                    LteSimulation.TransmissionMode.TM2,
-                    LteSimulation.TransmissionMode.TM3,
-                    LteSimulation.TransmissionMode.TM4,
-                    LteSimulation.TransmissionMode.TM8,
-                    LteSimulation.TransmissionMode.TM9
-            ]:
-                self.log.warning("Using two DL antennas is not allowed with "
-                                 "the current transmission mode. Changing the "
-                                 "number of DL antennas will override this "
-                                 "setting.")
-            bts.dl_antenna = 2
-        elif mimo == LteSimulation.MimoMode.MIMO_4x4 and \
-            self.LTE_SUPPORTS_4X4_MIMO:
-            if bts.transmode not in [
-                    LteSimulation.TransmissionMode.TM2,
-                    LteSimulation.TransmissionMode.TM3,
-                    LteSimulation.TransmissionMode.TM4,
-                    LteSimulation.TransmissionMode.TM9
-            ]:
-                self.log.warning("Using four DL antennas is not allowed with "
-                                 "the current transmission mode. Changing the "
-                                 "number of DL antennas will override this "
-                                 "setting.")
-
-            bts.dl_antenna = 4
-        else:
-            RuntimeError("The requested MIMO mode is not supported.")
-
-    def set_scheduling_mode(self, bts_index, scheduling, mcs_dl, mcs_ul,
-                            nrb_dl, nrb_ul):
-        """ Sets the scheduling mode for LTE
-
-        Args:
-            bts_index: the base station number
-            scheduling: DYNAMIC or STATIC scheduling (Enum list)
-            mcs_dl: Downlink MCS (only for STATIC scheduling)
-            mcs_ul: Uplink MCS (only for STATIC scheduling)
-            nrb_dl: Number of RBs for downlink (only for STATIC scheduling)
-            nrb_ul: Number of RBs for uplink (only for STATIC scheduling)
-        """
-
-        bts = self.bts[bts_index]
-        bts.lte_scheduling_mode = scheduling.value
-
-        if scheduling == LteSimulation.SchedulingMode.STATIC:
-
-            if not all([nrb_dl, nrb_ul, mcs_dl, mcs_ul]):
-                raise ValueError('When the scheduling mode is set to manual, '
-                                 'the RB and MCS parameters are required.')
-
-            bts.packet_rate = md8475a.BtsPacketRate.LTE_MANUAL
-            bts.lte_mcs_dl = mcs_dl
-            bts.lte_mcs_ul = mcs_ul
-            bts.nrb_dl = nrb_dl
-            bts.nrb_ul = nrb_ul
-
-        time.sleep(5)  # It takes some time to propagate the new settings
-
-    def lte_attach_secondary_carriers(self, ue_capability_enquiry):
-        """ Activates the secondary carriers for CA. Requires the DUT to be
-        attached to the primary carrier first.
-
-        Args:
-            ue_capability_enquiry: UE capability enquiry message to be sent to
-        the UE before starting carrier aggregation.
-        """
-
-        # Trigger UE capability enquiry from network to get
-        # UE supported CA band combinations. Here freq_bands is a hex string.
-        self.anritsu.trigger_ue_capability_enquiry(ue_capability_enquiry)
-
-        testcase = self.anritsu.get_AnritsuTestCases()
-        # A bug in the instrument's software (b/139547391) requires the test
-        # procedure to be set to whatever was the previous configuration before
-        # setting it to MULTICELL.
-        testcase.procedure = md8475a.TestProcedure(testcase.procedure)
-        testcase.procedure = md8475a.TestProcedure.PROCEDURE_MULTICELL
-        testcase.power_control = md8475a.TestPowerControl.POWER_CONTROL_DISABLE
-        testcase.measurement_LTE = md8475a.TestMeasurement.MEASUREMENT_DISABLE
-
-        # Enable the secondary carrier base stations for CA
-        for bts_index in range(1, self.num_carriers):
-            self.bts[bts_index].dl_cc_enabled = True
-
-        self.anritsu.start_testcase()
-
-        retry_counter = 0
-        self.log.info("Waiting for the test case to start...")
-        time.sleep(5)
-
-        while self.anritsu.get_testcase_status() == "0":
-            retry_counter += 1
-            if retry_counter == 3:
-                raise RuntimeError(
-                    "The test case failed to start after {} "
-                    "retries. The connection between the phone "
-                    "and the base station might be unstable.".format(
-                        retry_counter))
-            time.sleep(10)
-
-    def set_transmission_mode(self, bts_index, tmode):
-        """ Sets the transmission mode for the LTE basetation
-
-        Args:
-            bts_index: the base station number
-            tmode: Enum list from class 'TransmissionModeLTE'
-        """
-
-        bts = self.bts[bts_index]
-
-        # If the selected transmission mode does not support the number of DL
-        # antennas, throw an exception.
-        if (tmode in [
-                LteSimulation.TransmissionMode.TM1,
-                LteSimulation.TransmissionMode.TM7
-        ] and bts.dl_antenna != '1'):
-            # TM1 and TM7 only support 1 DL antenna
-            raise ValueError("{} allows only one DL antenna. Change the "
-                             "number of DL antennas before setting the "
-                             "transmission mode.".format(tmode.value))
-        elif (tmode == LteSimulation.TransmissionMode.TM8
-              and bts.dl_antenna != '2'):
-            # TM8 requires 2 DL antennas
-            raise ValueError("TM2 requires two DL antennas. Change the "
-                             "number of DL antennas before setting the "
-                             "transmission mode.")
-        elif (tmode in [
-                LteSimulation.TransmissionMode.TM2,
-                LteSimulation.TransmissionMode.TM3,
-                LteSimulation.TransmissionMode.TM4,
-                LteSimulation.TransmissionMode.TM9
-        ] and bts.dl_antenna == '1'):
-            # TM2, TM3, TM4 and TM9 require 2 or 4 DL antennas
-            raise ValueError("{} requires at least two DL atennas. Change the "
-                             "number of DL antennas before setting the "
-                             "transmission mode.".format(tmode.value))
-
-        # The TM mode is allowed for the current number of DL antennas, so it
-        # is safe to change this setting now
-        bts.transmode = tmode.value
-
-        time.sleep(5)  # It takes some time to propagate the new settings
-
-    def wait_until_attached(self, timeout=120):
-        """ Waits until the DUT is attached to the primary carrier.
-
-        Args:
-            timeout: after this amount of time the method will raise a
-                CellularSimulatorError exception. Default is 120 seconds.
-        """
-        try:
-            self.anritsu.wait_for_registration_state(time_to_wait=timeout)
-        except anritsu.AnritsuError:
-            raise cc.CellularSimulatorError('The phone did not attach before '
-                                            'the timeout period ended.')
-
-    def wait_until_communication_state(self, timeout=120):
-        """ Waits until the DUT is in Communication state.
-
-        Args:
-            timeout: after this amount of time the method will raise a
-                CellularSimulatorError exception. Default is 120 seconds.
-        """
-        try:
-            self.anritsu.wait_for_communication_state(time_to_wait=timeout)
-        except anritsu.AnritsuError:
-            raise cc.CellularSimulatorError('The phone was not in '
-                                            'Communication state before '
-                                            'the timeout period ended.')
-
-    def wait_until_idle_state(self, timeout=120):
-        """ Waits until the DUT is in Idle state.
-
-        Args:
-            timeout: after this amount of time the method will raise a
-                CellularSimulatorError exception. Default is 120 seconds.
-        """
-        try:
-            self.anritsu.wait_for_idle_state(time_to_wait=timeout)
-        except anritsu.AnritsuError:
-            raise cc.CellularSimulatorError('The phone was not in Idle state '
-                                            'before the time the timeout '
-                                            'period ended.')
-
-    def detach(self):
-        """ Turns off all the base stations so the DUT loose connection."""
-        if self.anritsu.get_smartstudio_status() == \
-            md8475a.ProcessingStatus.PROCESS_STATUS_NOTRUN.value:
-            self.log.info('Device cannot be detached because simulation is '
-                          'not running.')
-            return
-        self.anritsu.set_simulation_state_to_poweroff()
-
-    def stop(self):
-        """ Stops current simulation. After calling this method, the simulator
-        will need to be set up again. """
-        self.anritsu.stop_simulation()
-
-    def start_data_traffic(self):
-        """ Starts transmitting data from the instrument to the DUT. """
-        try:
-            self.anritsu.start_ip_traffic()
-        except md8475a.AnritsuError as inst:
-            # This typically happens when traffic is already running.
-            # TODO (b/141962691): continue only if traffic is running
-            self.log.warning(str(inst))
-        time.sleep(4)
-
-    def stop_data_traffic(self):
-        """ Stops transmitting data from the instrument to the DUT. """
-        try:
-            self.anritsu.stop_ip_traffic()
-        except md8475a.AnritsuError as inst:
-            # This typically happens when traffic has already been stopped
-            # TODO (b/141962691): continue only if traffic is stopped
-            self.log.warning(str(inst))
-        time.sleep(2)
-
-    def get_measured_pusch_power(self):
-        """ Queries PUSCH power measured at the callbox.
-
-        Returns:
-            The PUSCH power in the primary input port.
-        """
-        # Try three times before raising an exception. This is needed because
-        # the callbox sometimes reports an active chain as 'DEACTIVE'.
-        retries_left = 3
-
-        while retries_left > 0:
-
-            ul_pusch = self.anritsu.get_measured_pusch_power().split(',')[0]
-
-            if ul_pusch != 'DEACTIVE':
-                return float(ul_pusch)
-
-            time.sleep(3)
-            retries_left -= 1
-            self.log.info('Chain shows as inactive. %d retries left.' %
-                          retries_left)
-
-        raise cc.CellularSimulatorError('Could not get measured PUSCH power.')
-
-
-class MD8475BCellularSimulator(MD8475CellularSimulator):
-
-    MD8475_VERSION = 'B'
-
-    # Indicates if it is able to use 256 QAM as the downlink modulation for LTE
-    LTE_SUPPORTS_DL_256QAM = True
-
-    # Indicates if it is able to use 64 QAM as the uplink modulation for LTE
-    LTE_SUPPORTS_UL_64QAM = True
-
-    # Indicates if 4x4 MIMO is supported for LTE
-    LTE_SUPPORTS_4X4_MIMO = True
-
-    # The maximum number of carriers that this simulator can support for LTE
-    LTE_MAX_CARRIERS = 4
-
-    # The maximum power that the equipment is able to transmit
-    MAX_DL_POWER = -30
-
-    # Simulation config files in the callbox computer.
-    # These should be replaced in the future by setting up
-    # the same configuration manually.
-    LTE_BASIC_SIM_FILE = 'SIM_default_LTE.wnssp2'
-    LTE_BASIC_CELL_FILE = 'CELL_LTE_config.wnscp2'
-    LTE_CA_BASIC_SIM_FILE = 'SIM_LTE_CA.wnssp2'
-    LTE_CA_BASIC_CELL_FILE = 'CELL_LTE_CA_config.wnscp2'
-
-    # Filepath to the config files stored in the Anritsu callbox.
-    CALLBOX_CONFIG_PATH = 'C:\\Users\\MD8475B\\Documents\\DAN_configs\\'
-
-    def setup_lte_scenario(self):
-        """ The B model can support up to five carriers. """
-
-        super().setup_lte_scenario()
-
-        self.bts.extend([
-            self.anritsu.get_BTS(md8475a.BtsNumber.BTS3),
-            self.anritsu.get_BTS(md8475a.BtsNumber.BTS4),
-            self.anritsu.get_BTS(md8475a.BtsNumber.BTS5)
-        ])
diff --git a/src/antlion/controllers/anritsu_lib/md8475a.py b/src/antlion/controllers/anritsu_lib/md8475a.py
deleted file mode 100644
index ac67229..0000000
--- a/src/antlion/controllers/anritsu_lib/md8475a.py
+++ /dev/null
@@ -1,5066 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Controller interface for Anritsu Signalling Tester MD8475A.
-"""
-
-import logging
-import time
-import socket
-from enum import Enum
-from enum import IntEnum
-
-from antlion.controllers.anritsu_lib._anritsu_utils import AnritsuError
-from antlion.controllers.anritsu_lib._anritsu_utils import AnritsuUtils
-from antlion.controllers.anritsu_lib._anritsu_utils import NO_ERROR
-from antlion.controllers.anritsu_lib._anritsu_utils import OPERATION_COMPLETE
-
-from antlion import tracelogger
-
-TERMINATOR = "\0"
-
-# The following wait times (except COMMUNICATION_STATE_WAIT_TIME) are actually
-# the times for socket to time out. Increasing them is to make sure there is
-# enough time for MD8475A operation to be completed in some cases.
-# It won't increase test execution time.
-SMARTSTUDIO_LAUNCH_WAIT_TIME = 300  # was 90
-SMARTSTUDIO_SIMULATION_START_WAIT_TIME = 300  # was 120
-REGISTRATION_STATE_WAIT_TIME = 240
-LOAD_SIMULATION_PARAM_FILE_WAIT_TIME = 30
-COMMUNICATION_STATE_WAIT_TIME = 240
-ANRITSU_SOCKET_BUFFER_SIZE = 8192
-COMMAND_COMPLETE_WAIT_TIME = 180  # was 90
-SETTLING_TIME = 1
-WAIT_TIME_IDENTITY_RESPONSE = 5
-IDLE_STATE_WAIT_TIME = 240
-
-IMSI_READ_USERDATA_WCDMA = "081501"
-IMEI_READ_USERDATA_WCDMA = "081502"
-IMEISV_READ_USERDATA_WCDMA = "081503"
-IMSI_READ_USERDATA_LTE = "075501"
-IMEI_READ_USERDATA_LTE = "075502"
-IMEISV_READ_USERDATA_LTE = "075503"
-IMSI_READ_USERDATA_GSM = "081501"
-IMEI_READ_USERDATA_GSM = "081502"
-IMEISV_READ_USERDATA_GSM = "081503"
-IDENTITY_REQ_DATA_LEN = 24
-SEQ_LOG_MESSAGE_START_INDEX = 60
-
-WCDMA_BANDS = {
-    "I": "1",
-    "II": "2",
-    "III": "3",
-    "IV": "4",
-    "V": "5",
-    "VI": "6",
-    "VII": "7",
-    "VIII": "8",
-    "IX": "9",
-    "X": "10",
-    "XI": "11",
-    "XII": "12",
-    "XIII": "13",
-    "XIV": "14"
-}
-
-
-def create(configs):
-    objs = []
-    for c in configs:
-        ip_address = c["ip_address"]
-        objs.append(MD8475A(ip_address))
-    return objs
-
-
-def destroy(objs):
-    return
-
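-# Illustrative usage sketch (not part of the original controller). The only
-# field read by create() is "ip_address"; the address below is a hypothetical
-# example value.
-#
-#   configs = [{"ip_address": "192.168.137.1"}]
-#   callboxes = create(configs)   # returns a list of MD8475A objects
-#   anritsu = callboxes[0]
-#   destroy(callboxes)            # currently a no-op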
-
-class ProcessingStatus(Enum):
-    ''' MD8475A processing status for UE,Packet,Voice,Video,SMS,
-        PPP, PWS '''
-    PROCESS_STATUS_NONE = "NONE"
-    PROCESS_STATUS_NOTRUN = "NOTRUN"
-    PROCESS_STATUS_POWEROFF = "POWEROFF"
-    PROCESS_STATUS_REGISTRATION = "REGISTRATION"
-    PROCESS_STATUS_DETACH = "DETACH"
-    PROCESS_STATUS_IDLE = "IDLE"
-    PROCESS_STATUS_ORIGINATION = "ORIGINATION"
-    PROCESS_STATUS_HANDOVER = "HANDOVER"
-    PROCESS_STATUS_UPDATING = "UPDATING"
-    PROCESS_STATUS_TERMINATION = "TERMINATION"
-    PROCESS_STATUS_COMMUNICATION = "COMMUNICATION"
-    PROCESS_STATUS_UERELEASE = "UERELEASE"
-    PROCESS_STATUS_NWRELEASE = "NWRELEASE"
-
-
-class BtsNumber(Enum):
-    '''ID number for MD8475A supported BTS '''
-    BTS1 = "BTS1"
-    BTS2 = "BTS2"
-    BTS3 = "BTS3"
-    BTS4 = "BTS4"
-    BTS5 = "BTS5"
-
-
-class BtsTechnology(Enum):
-    ''' BTS system technology'''
-    LTE = "LTE"
-    WCDMA = "WCDMA"
-    TDSCDMA = "TDSCDMA"
-    GSM = "GSM"
-    CDMA1X = "CDMA1X"
-    EVDO = "EVDO"
-
-
-class BtsBandwidth(Enum):
-    ''' Values for Cell Bandwidth '''
-    LTE_BANDWIDTH_1dot4MHz = "1.4MHz"
-    LTE_BANDWIDTH_3MHz = "3MHz"
-    LTE_BANDWIDTH_5MHz = "5MHz"
-    LTE_BANDWIDTH_10MHz = "10MHz"
-    LTE_BANDWIDTH_15MHz = "15MHz"
-    LTE_BANDWIDTH_20MHz = "20MHz"
-
-    def get_float_value(bts_bandwidth):
-        """ Returns a float representing the bandwidth in MHz.
-
-        Args:
-            bts_bandwidth: a BtsBandwidth enum or a string matching one of the
-            values in the BtsBandwidth enum.
-        """
-
-        if isinstance(bts_bandwidth, BtsBandwidth):
-            bandwidth_str = bts_bandwidth.value
-        elif isinstance(bts_bandwidth, str):
-            bandwidth_str = bts_bandwidth
-        else:
-            raise TypeError('bts_bandwidth should be an instance of string or '
-                            'BtsBandwidth. ')
-
-        if bandwidth_str == BtsBandwidth.LTE_BANDWIDTH_20MHz.value:
-            return 20
-        elif bandwidth_str == BtsBandwidth.LTE_BANDWIDTH_15MHz.value:
-            return 15
-        elif bandwidth_str == BtsBandwidth.LTE_BANDWIDTH_10MHz.value:
-            return 10
-        elif bandwidth_str == BtsBandwidth.LTE_BANDWIDTH_5MHz.value:
-            return 5
-        elif bandwidth_str == BtsBandwidth.LTE_BANDWIDTH_3MHz.value:
-            return 3
-        elif bandwidth_str == BtsBandwidth.LTE_BANDWIDTH_1dot4MHz.value:
-            return 1.4
-        else:
-            raise ValueError(
-                'Could not map {} to a bandwidth value.'.format(bandwidth_str))
-
-
-MAX_NRB_FOR_BANDWIDTH = {
-    BtsBandwidth.LTE_BANDWIDTH_1dot4MHz.value: 6,
-    BtsBandwidth.LTE_BANDWIDTH_3MHz.value: 15,
-    BtsBandwidth.LTE_BANDWIDTH_5MHz.value: 25,
-    BtsBandwidth.LTE_BANDWIDTH_10MHz.value: 50,
-    BtsBandwidth.LTE_BANDWIDTH_15MHz.value: 75,
-    BtsBandwidth.LTE_BANDWIDTH_20MHz.value: 100
-}
-
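-# Illustrative usage sketch (not part of the original module):
-# get_float_value() accepts either a BtsBandwidth member or its string value,
-# so both calls below return 10 (MHz). MAX_NRB_FOR_BANDWIDTH is keyed by the
-# same string values.
-#
-#   mhz = BtsBandwidth.get_float_value(BtsBandwidth.LTE_BANDWIDTH_10MHz)
-#   mhz = BtsBandwidth.get_float_value("10MHz")
-#   nrb = MAX_NRB_FOR_BANDWIDTH[BtsBandwidth.LTE_BANDWIDTH_10MHz.value]  # 50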
-
-class LteMimoMode(Enum):
-    """ Values for LTE MIMO modes. """
-    NONE = "MIMONOT"
-    MIMO_2X2 = "MIMO2X2"
-    MIMO_4X4 = "MIMO4X4"
-
-
-class BtsGprsMode(Enum):
-    ''' Values for Gprs Modes '''
-    NO_GPRS = "NO_GPRS"
-    GPRS = "GPRS"
-    EGPRS = "EGPRS"
-
-
-class BtsPacketRate(Enum):
-    ''' Values for Cell Packet rate '''
-    LTE_MANUAL = "MANUAL"
-    LTE_BESTEFFORT = "BESTEFFORT"
-    WCDMA_DL384K_UL64K = "DL384K_UL64K"
-    WCDMA_DLHSAUTO_REL7_UL384K = "DLHSAUTO_REL7_UL384K"
-    WCDMA_DL18_0M_UL384K = "DL18_0M_UL384K"
-    WCDMA_DL21_6M_UL384K = "DL21_6M_UL384K"
-    WCDMA_DLHSAUTO_REL7_ULHSAUTO = "DLHSAUTO_REL7_ULHSAUTO"
-    WCDMA_DL18_0M_UL1_46M = "DL18_0M_UL1_46M"
-    WCDMA_DL18_0M_UL2_0M = "DL18_0M_UL2_0M"
-    WCDMA_DL18_0M_UL5_76M = "DL18_0M_UL5_76M"
-    WCDMA_DL21_6M_UL1_46M = "DL21_6M_UL1_46M"
-    WCDMA_DL21_6M_UL2_0M = "DL21_6M_UL2_0M"
-    WCDMA_DL21_6M_UL5_76M = "DL21_6M_UL5_76M"
-    WCDMA_DLHSAUTO_REL8_UL384K = "DLHSAUTO_REL8_UL384K"
-    WCDMA_DL23_4M_UL384K = "DL23_4M_UL384K"
-    WCDMA_DL28_0M_UL384K = "DL28_0M_UL384K"
-    WCDMA_DL36_0M_UL384K = "DL36_0M_UL384K"
-    WCDMA_DL43_2M_UL384K = "DL43_2M_UL384K"
-    WCDMA_DLHSAUTO_REL8_ULHSAUTO = "DLHSAUTO_REL8_ULHSAUTO"
-    WCDMA_DL23_4M_UL1_46M = "DL23_4M_UL1_46M"
-    WCDMA_DL23_4M_UL2_0M = "DL23_4M_UL2_0M"
-    WCDMA_DL23_4M_UL5_76M = "DL23_4M_UL5_76M"
-    WCDMA_DL28_0M_UL1_46M = "DL28_0M_UL1_46M"
-    WCDMA_DL28_0M_UL2_0M = "DL28_0M_UL2_0M"
-    WCDMA_DL28_0M_UL5_76M = "L28_0M_UL5_76M"
-    WCDMA_DL36_0M_UL1_46M = "DL36_0M_UL1_46M"
-    WCDMA_DL36_0M_UL2_0M = "DL36_0M_UL2_0M"
-    WCDMA_DL36_0M_UL5_76M = "DL36_0M_UL5_76M"
-    WCDMA_DL43_2M_UL1_46M = "DL43_2M_UL1_46M"
-    WCDMA_DL43_2M_UL2_0M = "DL43_2M_UL2_0M"
-    WCDMA_DL43_2M_UL5_76M = "DL43_2M_UL5_76M"
-
-
-class BtsPacketWindowSize(Enum):
-    ''' Values for Cell Packet window size '''
-    WINDOW_SIZE_1 = 1
-    WINDOW_SIZE_8 = 8
-    WINDOW_SIZE_16 = 16
-    WINDOW_SIZE_32 = 32
-    WINDOW_SIZE_64 = 64
-    WINDOW_SIZE_128 = 128
-    WINDOW_SIZE_256 = 256
-    WINDOW_SIZE_512 = 512
-    WINDOW_SIZE_768 = 768
-    WINDOW_SIZE_1024 = 1024
-    WINDOW_SIZE_1536 = 1536
-    WINDOW_SIZE_2047 = 2047
-
-
-class BtsServiceState(Enum):
-    ''' Values for BTS service state '''
-    SERVICE_STATE_IN = "IN"
-    SERVICE_STATE_OUT = "OUT"
-
-
-class BtsCellBarred(Enum):
-    ''' Values for Cell barred parameter '''
-    NOTBARRED = "NOTBARRED"
-    BARRED = "BARRED"
-
-
-class BtsAccessClassBarred(Enum):
-    ''' Values for Access class barred parameter '''
-    NOTBARRED = "NOTBARRED"
-    EMERGENCY = "EMERGENCY"
-    BARRED = "BARRED"
-    USERSPECIFIC = "USERSPECIFIC"
-
-
-class BtsLteEmergencyAccessClassBarred(Enum):
-    ''' Values for Lte emergency access class barred parameter '''
-    NOTBARRED = "NOTBARRED"
-    BARRED = "BARRED"
-
-
-class BtsNwNameEnable(Enum):
-    ''' Values for BTS network name enable parameter '''
-    NAME_ENABLE = "ON"
-    NAME_DISABLE = "OFF"
-
-
-class IPAddressType(Enum):
-    ''' Values for IP address type '''
-    IPV4 = "IPV4"
-    IPV6 = "IPV6"
-    IPV4V6 = "IPV4V6"
-
-
-class TriggerMessageIDs(Enum):
-    ''' ID for Trigger messages  '''
-    RRC_CONNECTION_REQ = 111101
-    RRC_CONN_REESTABLISH_REQ = 111100
-    ATTACH_REQ = 141141
-    DETACH_REQ = 141145
-    MM_LOC_UPDATE_REQ = 221108
-    GMM_ATTACH_REQ = 241101
-    GMM_RA_UPDATE_REQ = 241108
-    IDENTITY_REQUEST_LTE = 141155
-    IDENTITY_REQUEST_WCDMA = 241115
-    IDENTITY_REQUEST_GSM = 641115
-    UE_CAPABILITY_ENQUIRY = 111167
-
-
-class TriggerMessageReply(Enum):
-    ''' Values for Trigger message reply parameter '''
-    ACCEPT = "ACCEPT"
-    REJECT = "REJECT"
-    IGNORE = "IGNORE"
-    NONE = "NONE"
-    ILLEGAL = "ILLEGAL"
-
-
-class TestProcedure(Enum):
-    ''' Values for different Test procedures in MD8475A '''
-    PROCEDURE_BL = "BL"
-    PROCEDURE_SELECTION = "SELECTION"
-    PROCEDURE_RESELECTION = "RESELECTION"
-    PROCEDURE_REDIRECTION = "REDIRECTION"
-    PROCEDURE_HO = "HO"
-    PROCEDURE_HHO = "HHO"
-    PROCEDURE_SHO = "SHO"
-    PROCEDURE_MEASUREMENT = "MEASUREMENT"
-    PROCEDURE_CELLCHANGE = "CELLCHANGE"
-    PROCEDURE_MULTICELL = "MULTICELL"
-
-
-class TestPowerControl(Enum):
-    ''' Values for power control in test procedure '''
-    POWER_CONTROL_ENABLE = "ENABLE"
-    POWER_CONTROL_DISABLE = "DISABLE"
-
-
-class TestMeasurement(Enum):
-    ''' Values for measurement in test procedure '''
-    MEASUREMENT_ENABLE = "ENABLE"
-    MEASUREMENT_DISABLE = "DISABLE"
-
-
-'''MD8475A processing states'''
-_PROCESS_STATES = {
-    "NONE": ProcessingStatus.PROCESS_STATUS_NONE,
-    "NOTRUN": ProcessingStatus.PROCESS_STATUS_NOTRUN,
-    "POWEROFF": ProcessingStatus.PROCESS_STATUS_POWEROFF,
-    "REGISTRATION": ProcessingStatus.PROCESS_STATUS_REGISTRATION,
-    "DETACH": ProcessingStatus.PROCESS_STATUS_DETACH,
-    "IDLE": ProcessingStatus.PROCESS_STATUS_IDLE,
-    "ORIGINATION": ProcessingStatus.PROCESS_STATUS_ORIGINATION,
-    "HANDOVER": ProcessingStatus.PROCESS_STATUS_HANDOVER,
-    "UPDATING": ProcessingStatus.PROCESS_STATUS_UPDATING,
-    "TERMINATION": ProcessingStatus.PROCESS_STATUS_TERMINATION,
-    "COMMUNICATION": ProcessingStatus.PROCESS_STATUS_COMMUNICATION,
-    "UERELEASE": ProcessingStatus.PROCESS_STATUS_UERELEASE,
-    "NWRELEASE": ProcessingStatus.PROCESS_STATUS_NWRELEASE,
-}
-
-
-class ImsCscfStatus(Enum):
-    """ MD8475A ims cscf status for UE
-    """
-    OFF = "OFF"
-    SIPIDLE = "SIPIDLE"
-    CONNECTED = "CONNECTED"
-    CALLING = "CALLING"
-    RINGING = "RINGING"
-    UNKNOWN = "UNKNOWN"
-
-
-class ImsCscfCall(Enum):
-    """ MD8475A ims cscf call action
-    """
-    MAKE = "MAKE"
-    END = "END"
-    MAKEVIDEO = "MAKEVIDEO"
-    MAKE2ND = "MAKE2ND"
-    END2ND = "END2ND"
-    ANSWER = "ANSWER"
-    HOLD = "HOLD"
-    RESUME = "RESUME"
-
-
-class VirtualPhoneStatus(IntEnum):
-    ''' MD8475A virtual phone status for UE voice and UE video calls '''
-    STATUS_IDLE = 0
-    STATUS_VOICECALL_ORIGINATION = 1
-    STATUS_VOICECALL_INCOMING = 2
-    STATUS_VOICECALL_INPROGRESS = 3
-    STATUS_VOICECALL_DISCONNECTING = 4
-    STATUS_VOICECALL_DISCONNECTED = 5
-    STATUS_VIDEOCALL_ORIGINATION = 6
-    STATUS_VIDEOCALL_INCOMING = 7
-    STATUS_VIDEOCALL_INPROGRESS = 8
-    STATUS_VIDEOCALL_DISCONNECTING = 9
-    STATUS_VIDEOCALL_DISCONNECTED = 10
-
-
-'''Virtual Phone Status '''
-_VP_STATUS = {
-    "0": VirtualPhoneStatus.STATUS_IDLE,
-    "1": VirtualPhoneStatus.STATUS_VOICECALL_ORIGINATION,
-    "2": VirtualPhoneStatus.STATUS_VOICECALL_INCOMING,
-    "3": VirtualPhoneStatus.STATUS_VOICECALL_INPROGRESS,
-    "4": VirtualPhoneStatus.STATUS_VOICECALL_DISCONNECTING,
-    "5": VirtualPhoneStatus.STATUS_VOICECALL_DISCONNECTED,
-    "6": VirtualPhoneStatus.STATUS_VIDEOCALL_ORIGINATION,
-    "7": VirtualPhoneStatus.STATUS_VIDEOCALL_INCOMING,
-    "8": VirtualPhoneStatus.STATUS_VIDEOCALL_INPROGRESS,
-    "9": VirtualPhoneStatus.STATUS_VIDEOCALL_DISCONNECTING,
-    "10": VirtualPhoneStatus.STATUS_VIDEOCALL_DISCONNECTED,
-}
-
-
-class VirtualPhoneAutoAnswer(Enum):
-    ''' Virtual phone auto answer enable values'''
-    ON = "ON"
-    OFF = "OFF"
-
-
-class CsfbType(Enum):
-    ''' CSFB Type values'''
-    CSFB_TYPE_REDIRECTION = "REDIRECTION"
-    CSFB_TYPE_HANDOVER = "HO"
-
-
-class ReturnToEUTRAN(Enum):
-    '''Return to EUTRAN setting values '''
-    RETEUTRAN_ENABLE = "ENABLE"
-    RETEUTRAN_DISABLE = "DISABLE"
-
-
-class CTCHSetup(Enum):
-    '''CTCH setting values '''
-    CTCH_ENABLE = "ENABLE"
-    CTCH_DISABLE = "DISABLE"
-
-
-class UEIdentityType(Enum):
-    '''UE Identity type values '''
-    IMSI = "IMSI"
-    IMEI = "IMEI"
-    IMEISV = "IMEISV"
-
-
-class CBCHSetup(Enum):
-    '''CBCH setting values '''
-    CBCH_ENABLE = "ENABLE"
-    CBCH_DISABLE = "DISABLE"
-
-
-class Switch(Enum):
-    ''' Values for ENABLE or DISABLE '''
-    ENABLE = "ENABLE"
-    DISABLE = "DISABLE"
-
-
-class ModulationType(Enum):
-    """Supported Modulation Types."""
-    Q16 = '16QAM'
-    Q64 = '64QAM'
-    Q256 = '256QAM'
-
-
-class MD8475A(object):
-    """Class to communicate with Anritsu MD8475A Signalling Tester.
-       This uses GPIB command to interface with Anritsu MD8475A """
-    def __init__(self, ip_address, wlan=False, md8475_version="A"):
-        self._error_reporting = True
-        self._ipaddr = ip_address
-        self.log = tracelogger.TraceLogger(logging.getLogger())
-        self._wlan = wlan
-        port_number = 28002
-        self._md8475_version = md8475_version
-        if md8475_version == "B":
-            global TERMINATOR
-            TERMINATOR = "\n"
-            port_number = 5025
-
-        # Open socket connection to Signaling Tester
-        self.log.info("Opening Socket Connection with "
-                      "Signaling Tester ({}) ".format(self._ipaddr))
-        try:
-            self._sock = socket.create_connection((self._ipaddr, port_number),
-                                                  timeout=120)
-            self.send_query("*IDN?", 60)
-            self.log.info("Communication with Signaling Tester OK.")
-            self.log.info("Opened Socket connection to ({})"
-                          "with handle ({})".format(self._ipaddr, self._sock))
-            # launching Smart Studio Application needed for the simulation
-            ret = self.launch_smartstudio()
-        except socket.timeout:
-            raise AnritsuError("Timeout happened while conencting to"
-                               " Anritsu MD8475A")
-        except socket.error:
-            raise AnritsuError("Socket creation error")
-
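-    # Illustrative usage sketch (not part of the original class): the
-    # constructor opens the control socket and launches Smart Studio, so
-    # instantiation is the only setup step. Port 28002 (terminator "\0") is
-    # used for the A model and port 5025 (terminator "\n") for the B model.
-    # The IP address below is a hypothetical example.
-    #
-    #   anritsu = MD8475A("192.168.137.1", md8475_version="B")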
-    def get_BTS(self, btsnumber):
-        """ Returns the BTS object based on the BTS number provided
-
-        Args:
-            btsnumber: BTS number (BTS1, BTS2)
-
-        Returns:
-            BTS object
-        """
-        return _BaseTransceiverStation(self, btsnumber)
-
-    def get_AnritsuTestCases(self):
-        """ Returns the Anritsu Test Case Module Object
-
-        Args:
-            None
-
-        Returns:
-            Anritsu Test Case Module Object
-        """
-        return _AnritsuTestCases(self)
-
-    def get_VirtualPhone(self):
-        """ Returns the Anritsu Virtual Phone Module Object
-
-        Args:
-            None
-
-        Returns:
-            Anritsu Virtual Phone Module Object
-        """
-        return _VirtualPhone(self)
-
-    def get_PDN(self, pdn_number):
-        """ Returns the PDN Module Object
-
-        Args:
-            None
-
-        Returns:
-            Anritsu PDN Module Object
-        """
-        return _PacketDataNetwork(self, pdn_number)
-
-    def get_TriggerMessage(self):
-        """ Returns the Anritsu Trigger Message Module Object
-
-        Args:
-            None
-
-        Returns:
-            Anritsu Trigger Message Module Object
-        """
-        return _TriggerMessage(self)
-
-    def get_IMS(self, vnid):
-        """ Returns the IMS Module Object with VNID
-
-        Args:
-            vnid: Virtual Network ID
-
-        Returns:
-            Anritsu IMS VNID Module Object
-        """
-        return _IMS_Services(self, vnid)
-
-    def get_ims_cscf_status(self, virtual_network_id):
-        """ Get the IMS CSCF Status of virtual network
-
-        Args:
-            virtual_network_id: virtual network id
-
-        Returns:
-            IMS CSCF status
-        """
-        cmd = "IMSCSCFSTAT? {}".format(virtual_network_id)
-        return self.send_query(cmd)
-
-    def ims_cscf_call_action(self, virtual_network_id, action):
-        """ IMS CSCF Call action
-
-        Args:
-            virtual_network_id: virtual network id
-            action: action to make
-
-        Returns:
-            None
-        """
-        cmd = "IMSCSCFCALL {},{}".format(virtual_network_id, action)
-        self.send_command(cmd)
-
-    def send_query(self, query, sock_timeout=120):
-        """ Sends a Query message to Anritsu and return response
-
-        Args:
-            query - Query string
-
-        Returns:
-            query response
-        """
-        self.log.info("--> {}".format(query))
-        querytoSend = (query + TERMINATOR).encode('utf-8')
-        self._sock.settimeout(sock_timeout)
-        try:
-            self._sock.send(querytoSend)
-            result = self._sock.recv(ANRITSU_SOCKET_BUFFER_SIZE).rstrip(
-                TERMINATOR.encode('utf-8'))
-            response = result.decode('utf-8')
-            self.log.info('<-- {}'.format(response))
-            return response
-        except socket.timeout:
-            raise AnritsuError("Timeout: Response from Anritsu")
-        except socket.error:
-            raise AnritsuError("Socket Error")
-
-    def send_command(self, command, sock_timeout=120):
-        """ Sends a Command message to Anritsu
-
-        Args:
-            command - command string
-
-        Returns:
-            None
-        """
-        self.log.info("--> {}".format(command))
-        if self._error_reporting:
-            cmdToSend = (command + ";ERROR?" + TERMINATOR).encode('utf-8')
-            self._sock.settimeout(sock_timeout)
-            try:
-                self._sock.send(cmdToSend)
-                err = self._sock.recv(ANRITSU_SOCKET_BUFFER_SIZE).rstrip(
-                    TERMINATOR.encode('utf-8'))
-                error = int(err.decode('utf-8'))
-                if error != NO_ERROR:
-                    raise AnritsuError(error, command)
-            except socket.timeout:
-                raise AnritsuError("Timeout for Command Response from Anritsu")
-            except socket.error:
-                raise AnritsuError("Socket Error for Anritsu command")
-            except Exception as e:
-                raise AnritsuError(e, command)
-        else:
-            cmdToSend = (command + TERMINATOR).encode('utf-8')
-            try:
-                self._sock.send(cmdToSend)
-            except socket.error:
-                raise AnritsuError("Socket Error", command)
-            return
-
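-    # Illustrative usage sketch (not part of the original class): send_query()
-    # returns the instrument's response string, while send_command() appends
-    # ";ERROR?" (when error reporting is enabled) and raises AnritsuError on a
-    # non-zero error code.
-    #
-    #   status = anritsu.send_query("STAT?")   # e.g. "NOTRUN"
-    #   anritsu.send_command("CLEARMSGLOG")    # raises AnritsuError on failure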
-    def launch_smartstudio(self):
-        """ launch the Smart studio application
-            This should be done before stating simulation
-
-        Args:
-            None
-
-        Returns:
-            None
-        """
-        # Check the Smart Studio status. If Smart Studio is not running,
-        # start it; if a simulation is running, stop it. Smart Studio should
-        # be in the NOTRUN (Simulation Stopped) state to start a new simulation.
-        stat = self.send_query("STAT?", 30)
-        if stat == "NOTEXIST":
-            self.log.info("Launching Smart Studio Application,"
-                          "it takes about a minute.")
-            time_to_wait = SMARTSTUDIO_LAUNCH_WAIT_TIME
-            sleep_interval = 15
-            waiting_time = 0
-
-            err = self.send_command("RUN", SMARTSTUDIO_LAUNCH_WAIT_TIME)
-            stat = self.send_query("STAT?")
-            while stat != "NOTRUN":
-                time.sleep(sleep_interval)
-                waiting_time = waiting_time + sleep_interval
-                if waiting_time <= time_to_wait:
-                    stat = self.send_query("STAT?")
-                else:
-                    raise AnritsuError("Timeout: Smart Studio launch")
-        elif stat == "RUNNING":
-            # Stop simulation if necessary
-            self.send_command("STOP", 60)
-            stat = self.send_query("STAT?")
-
-        # The state of the Smart Studio should be NOTRUN at this point,
-        # after one of the steps above.
-        if stat != "NOTRUN":
-            self.log.info(
-                "Can not launch Smart Studio, "
-                "please shut down all the Smart Studio SW components")
-            raise AnritsuError("Could not run SmartStudio")
-
-    def close_smartstudio(self):
-        """ Closes the Smart studio application
-
-        Args:
-            None
-
-        Returns:
-            None
-        """
-        self.stop_simulation()
-        self.send_command("EXIT", 60)
-
-    def get_smartstudio_status(self):
-        """ Gets the Smart studio status
-
-        Args:
-            None
-
-        Returns:
-            Smart studio status
-        """
-        return self.send_query("STAT?")
-
-    def start_simulation(self):
-        """ Starting the simulation of the network model.
-            simulation model or simulation parameter file
-            should be set before starting the simulation
-
-        Args:
-          None
-
-        Returns:
-            None
-        """
-        time_to_wait = SMARTSTUDIO_SIMULATION_START_WAIT_TIME
-        sleep_interval = 2
-        waiting_time = 0
-
-        self.send_command("START", SMARTSTUDIO_SIMULATION_START_WAIT_TIME)
-
-        self.log.info("Waiting for CALLSTAT=POWEROFF")
-        callstat = self.send_query("CALLSTAT? BTS1").split(",")
-        while callstat[0] != "POWEROFF":
-            time.sleep(sleep_interval)
-            waiting_time += sleep_interval
-            if waiting_time <= time_to_wait:
-                callstat = self.send_query("CALLSTAT? BTS1").split(",")
-            else:
-                raise AnritsuError("Timeout: Starting simulation")
-
-    def stop_simulation(self):
-        """ Stop simulation operation
-
-        Args:
-          None
-
-        Returns:
-            None
-        """
-        # Stop virtual networks (IMS) #1 and #2 if they are still running.
-        # This is needed until the Sync command is supported in 6.40a.
-        if self.send_query("IMSVNSTAT? 1") == "RUNNING":
-            self.send_command("IMSSTOPVN 1")
-        if self.send_query("IMSVNSTAT? 2") == "RUNNING":
-            self.send_command("IMSSTOPVN 2")
-        stat = self.send_query("STAT?")
-        # Stop the simulation if it is RUNNING
-        if stat == "RUNNING":
-            self.send_command("STOP", 60)
-            stat = self.send_query("STAT?")
-            if stat != "NOTRUN":
-                self.log.info("Failed to stop simulation")
-                raise AnritsuError("Failed to stop simulation")
-
-    def reset(self):
-        """ reset simulation parameters
-
-        Args:
-          None
-
-        Returns:
-            None
-        """
-        self.send_command("*RST", COMMAND_COMPLETE_WAIT_TIME)
-
-    def load_simulation_paramfile(self, filepath):
-        """ loads simulation model parameter file
-        Args:
-          filepath : simulation model parameter file path
-
-        Returns:
-            None
-        """
-        self.stop_simulation()
-        cmd = "LOADSIMPARAM \"" + filepath + '\";ERROR?'
-        self.send_query(cmd, LOAD_SIMULATION_PARAM_FILE_WAIT_TIME)
-
-    def load_cell_paramfile(self, filepath):
-        """ loads cell model parameter file
-
-        Args:
-          filepath : cell model parameter file path
-
-        Returns:
-            None
-        """
-        self.stop_simulation()
-        cmd = "LOADCELLPARAM \"" + filepath + '\";ERROR?'
-        status = int(self.send_query(cmd))
-        if status != NO_ERROR:
-            raise AnritsuError(status, cmd)
-
-    def _set_simulation_model(self, sim_model, reset=True):
-        """ Set simulation model and valid the configuration
-
-        Args:
-            sim_model: simulation model
-            reset: if True, reset the simulation after setting the new
-            simulation model
-        Returns:
-            True/False
-        """
-        error = int(
-            self.send_query("SIMMODEL %s;ERROR?" % sim_model,
-                            COMMAND_COMPLETE_WAIT_TIME))
-        if error:  # Try again if first set SIMMODEL fails
-            time.sleep(3)
-            if "WLAN" in sim_model:
-                new_sim_model = sim_model[:-5]
-                error = int(
-                    self.send_query("SIMMODEL %s;ERROR?" % new_sim_model,
-                                    COMMAND_COMPLETE_WAIT_TIME))
-                time.sleep(3)
-            error = int(
-                self.send_query("SIMMODEL %s;ERROR?" % sim_model,
-                                COMMAND_COMPLETE_WAIT_TIME))
-            if error:
-                return False
-        if reset:
-            # Reset might be necessary because SIMMODEL will load
-            # some of the contents from previous parameter files.
-            self.reset()
-        return True
-
-    def set_simulation_model(self, *bts_rats, reset=True):
-        """ Stops the simulation and then sets the simulation model.
-
-        Args:
-            *bts_rats: base station rats for BTS 1 to 5.
-            reset: if True, reset the simulation after setting the new
-            simulation model
-        Returns:
-            True or False
-        """
-        self.stop_simulation()
-        if len(bts_rats) not in range(1, 6):
-            raise ValueError(
-                "set_simulation_model requires 1 to 5 BTS values.")
-        simmodel = ",".join(bts_rat.value for bts_rat in bts_rats)
-        if self._wlan:
-            simmodel = simmodel + "," + "WLAN"
-        return self._set_simulation_model(simmodel, reset)
-
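-    # Illustrative usage sketch (not part of the original class): configuring
-    # a two-cell LTE/WCDMA simulation. The BtsTechnology values map directly
-    # onto the SIMMODEL command built above.
-    #
-    #   anritsu.set_simulation_model(BtsTechnology.LTE, BtsTechnology.WCDMA)
-    #   lte_bts = anritsu.get_BTS(BtsNumber.BTS1)
-    #   anritsu.start_simulation()
-    #   anritsu.wait_for_registration_state(bts=1)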
-    def get_simulation_model(self):
-        """ Gets the simulation model
-
-        Args:
-            None
-
-        Returns:
-            Current simulation model
-        """
-        cmd = "SIMMODEL?"
-        return self.send_query(cmd)
-
-    def get_lte_rrc_status_change(self):
-        """ Gets the LTE RRC status change function state
-
-        Returns:
-            Boolean: True is Enabled / False is Disabled
-        """
-        cmd = "L_RRCSTAT?"
-        return self.send_query(cmd) == "ENABLE"
-
-    def set_lte_rrc_status_change(self, status_change):
-        """ Enables or Disables the LTE RRC status change function
-
-        Returns:
-            None
-        """
-        cmd = "L_RRCSTAT "
-        if status_change:
-            cmd += "ENABLE"
-        else:
-            cmd += "DISABLE"
-        self.send_command(cmd)
-
-    def get_lte_rrc_status_change_timer(self):
-        """ Gets the LTE RRC Status Change Timer
-
-        Returns:
-            returns a status change timer integer value
-        """
-        cmd = "L_STATTMR?"
-        return self.send_query(cmd)
-
-    def set_lte_rrc_status_change_timer(self, time):
-        """ Sets the LTE RRC Status Change Timer parameter
-
-        Returns:
-            None
-        """
-        cmd = "L_STATTMR %s" % time
-        self.send_command(cmd)
-
-    def set_umts_rrc_status_change(self, status_change):
-        """ Enables or Disables the UMTS RRC status change function
-
-        Returns:
-            None
-        """
-        cmd = "W_RRCSTAT "
-        if status_change:
-            cmd += "ENABLE"
-        else:
-            cmd += "DISABLE"
-        self.send_command(cmd)
-
-    def get_umts_rrc_status_change(self):
-        """ Gets the UMTS RRC Status Change
-
-        Returns:
-            Boolean: True is Enabled / False is Disabled
-        """
-        cmd = "W_RRCSTAT?"
-        return self.send_query(cmd)
-
-    def set_umts_dch_stat_timer(self, timer_seconds):
-        """ Sets the UMTS RRC DCH timer
-
-        Returns:
-            None
-        """
-        cmd = "W_STATTMRDCH %s" % timer_seconds
-        self.send_command(cmd)
-
-    def set_simulation_state_to_poweroff(self):
-        """ Sets the simulation state to POWER OFF
-
-        Args:
-          None
-
-        Returns:
-            None
-        """
-        self.send_command("RESETSIMULATION POWEROFF")
-        time_to_wait = 30
-        sleep_interval = 2
-        waiting_time = 0
-
-        self.log.info("Waiting for CALLSTAT=POWEROFF")
-        callstat = self.send_query("CALLSTAT?").split(",")
-        while callstat[0] != "POWEROFF":
-            time.sleep(sleep_interval)
-            waiting_time = waiting_time + sleep_interval
-            if waiting_time <= time_to_wait:
-                callstat = self.send_query("CALLSTAT?").split(",")
-            else:
-                break
-
-    def set_simulation_state_to_idle(self, btsnumber):
-        """ Sets the simulation state to IDLE
-
-        Args:
-          None
-
-        Returns:
-            None
-        """
-        if not isinstance(btsnumber, BtsNumber):
-            raise ValueError(' The parameter should be of type "BtsNumber" ')
-        cmd = "RESETSIMULATION IDLE," + btsnumber.value
-        self.send_command(cmd)
-        time_to_wait = 30
-        sleep_interval = 2
-        waiting_time = 0
-
-        self.log.info("Waiting for CALLSTAT=IDLE")
-        callstat = self.send_query("CALLSTAT?").split(",")
-        while callstat[0] != "IDLE":
-            time.sleep(sleep_interval)
-            waiting_time = waiting_time + sleep_interval
-            if waiting_time <= time_to_wait:
-                callstat = self.send_query("CALLSTAT?").split(",")
-            else:
-                break
-
-    def set_trigger_message_mode(self, msg_id):
-        """ Sets the Message Mode of the trigger
-
-        Args:
-            msg_id: The hex value of the identity of an RRC/NAS message.
-
-        Returns:
-            None
-        """
-
-        if isinstance(msg_id, TriggerMessageIDs):
-            msg_id = msg_id.value
-
-        cmd = "TMMESSAGEMODE {},USERDATA".format(msg_id)
-        self.send_command(cmd)
-
-    def set_data_of_trigger_message(self, msg_id, user_data):
-        """ Sets the User Data of the trigger message
-
-        Args:
-            msg_id: The hex value of the identity of an RRC/NAS message.
-            user_data: Hex data
-
-        Returns:
-            None
-        """
-
-        if isinstance(msg_id, TriggerMessageIDs):
-            msg_id = msg_id.value
-
-        data_len = len(user_data) * 4
-
-        cmd = "TMUSERDATA {}, {}, {}".format(msg_id, user_data, data_len)
-        self.send_command(cmd)
-
-    def send_trigger_message(self, msg_id):
-        """ Sends the User Data of the trigger information
-
-        Args:
-            msg_id: The hex value of the identity of an RRC/NAS message.
-
-        Returns:
-            None
-        """
-
-        if isinstance(msg_id, TriggerMessageIDs):
-            msg_id = msg_id.value
-
-        cmd = "TMSENDUSERMSG {}".format(msg_id)
-        self.send_command(cmd)
-
-    def wait_for_registration_state(self,
-                                    bts=1,
-                                    time_to_wait=REGISTRATION_STATE_WAIT_TIME):
-        """ Waits for UE registration state on Anritsu
-
-        Args:
-          bts: index of MD8475A BTS, eg 1, 2
-          time_to_wait: time to wait for the phone to get to registration state
-
-        Returns:
-            None
-        """
-        self.log.info("wait for IDLE/COMMUNICATION state on anritsu.")
-
-        sleep_interval = 1
-        sim_model = (self.get_simulation_model()).split(",")
-        # wait 1 more round for GSM because of PS attach
-        registration_check_iterations = 2 if sim_model[bts - 1] == "GSM" else 1
-        for _ in range(registration_check_iterations):
-            waiting_time = 0
-            while waiting_time <= time_to_wait:
-                callstat = self.send_query(
-                    "CALLSTAT? BTS{}".format(bts)).split(",")
-                if callstat[0] == "IDLE" or callstat[1] == "COMMUNICATION":
-                    break
-                time.sleep(sleep_interval)
-                waiting_time += sleep_interval
-            else:
-                raise AnritsuError(
-                    "UE failed to register in {} seconds".format(time_to_wait))
-            time.sleep(sleep_interval)
-
-    def wait_for_communication_state(
-            self, time_to_wait=COMMUNICATION_STATE_WAIT_TIME):
-        """ Waits for UE communication state on Anritsu
-
-        Args:
-          time_to_wait: time to wait for the phone to get to communication state
-
-        Returns:
-            None
-        """
-        self.log.info("wait for COMMUNICATION state on anritsu")
-        sleep_interval = 1
-        waiting_time = 0
-
-        self.log.info("Waiting for CALLSTAT=COMMUNICATION")
-        callstat = self.send_query("CALLSTAT? BTS1").split(",")
-        while callstat[1] != "COMMUNICATION":
-            time.sleep(sleep_interval)
-            waiting_time += sleep_interval
-            if waiting_time <= time_to_wait:
-                callstat = self.send_query("CALLSTAT? BTS1").split(",")
-            else:
-                raise AnritsuError("UE failed to register on network")
-
-    def wait_for_idle_state(self, time_to_wait=IDLE_STATE_WAIT_TIME):
-        """ Waits for UE idle state on Anritsu
-
-        Args:
-          time_to_wait: time to wait for the phone to get to idle state
-
-        Returns:
-            None
-        """
-        self.log.info("wait for IDLE state on anritsu.")
-
-        sleep_interval = 1
-        waiting_time = 0
-
-        callstat = self.send_query("CALLSTAT? BTS1").split(",")
-        while callstat[0] != "IDLE":
-            time.sleep(sleep_interval)
-            waiting_time += sleep_interval
-            if waiting_time <= time_to_wait:
-                callstat = self.send_query("CALLSTAT? BTS1").split(",")
-            else:
-                raise AnritsuError("UE failed to go on idle state")
-
-    def get_camping_cell(self):
-        """ Gets the current camping cell information
-
-        Args:
-          None
-
-        Returns:
-            returns a tuple (BTS number, RAT Technology)
-        """
-        bts_number, rat_info = self.send_query("CAMPINGCELL?").split(",")
-        return bts_number, rat_info
-
-    def get_supported_bands(self, rat):
-        """ Gets the supported bands from UE capability information
-
-        Args:
-          rat: LTE or WCDMA
-
-        Returns:
-            returns a list of bands
-        """
-        cmd = "UEINFO? "
-        if rat == "LTE":
-            cmd += "L"
-        elif rat == "WCDMA":
-            cmd += "W"
-        else:
-            raise ValueError('The rat argument needs to be "LTE" or "WCDMA"')
-        cmd += "_SupportedBand"
-        result = self.send_query(cmd)
-        # Check for "NONE" before splitting; the previous comparison against
-        # the split list could never match.
-        if result == "NONE":
-            return None
-        result = result.split(",")
-        if rat == "WCDMA":
-            bands = []
-            for band in result:
-                bands.append(WCDMA_BANDS[band])
-            return bands
-        else:
-            return result
-
-    def start_testcase(self):
-        """ Starts a test case on Anritsu
-
-        Args:
-          None
-
-        Returns:
-            None
-        """
-        self.send_command("STARTTEST")
-
-    def get_testcase_status(self):
-        """ Gets the current test case status on Anritsu
-
-        Args:
-          None
-
-        Returns:
-            current test case status
-        """
-        return self.send_query("TESTSTAT?")
-
-    def start_ip_traffic(self, pdn='1'):
-        """ Starts IP data traffic with the selected PDN.
-
-        Args:
-            pdn: the pdn to be used for data traffic. Defaults to '1'.
-        """
-        self.send_command('OPERATEIPTRAFFIC START,' + pdn)
-
-    def stop_ip_traffic(self, pdn='1'):
-        """ Stops IP data traffic with the selected PDN.
-
-         Args:
-            pdn: pdn for which data traffic has to be stopped. Defaults to '1'.
-        """
-        self.send_command('OPERATEIPTRAFFIC STOP,' + pdn)
-
-    def set_carrier_aggregation_enabled(self, enabled=True):
-        """ Enables or disables de carrier aggregation option.
-
-        Args:
-            enabled: enables CA if True and disables CA if False.
-        """
-        cmd = 'CA ENABLE' if enabled else 'CA DISABLE'
-        self.send_command(cmd)
-
-    # Common Default Gateway:
-    @property
-    def gateway_ipv4addr(self):
-        """ Gets the IPv4 address of the default gateway
-
-        Args:
-          None
-
-        Returns:
-            current UE status
-        """
-        return self.send_query("DGIPV4?")
-
-    @gateway_ipv4addr.setter
-    def gateway_ipv4addr(self, ipv4_addr):
-        """ sets the IPv4 address of the default gateway
-        Args:
-            ipv4_addr: IPv4 address of the default gateway
-
-        Returns:
-            None
-        """
-        cmd = "DGIPV4 " + ipv4_addr
-        self.send_command(cmd)
-
-    @property
-    def gateway_ipv6addr(self):
-        """ Gets the IPv6 address of the default gateway
-
-        Args:
-          None
-
-        Returns:
-            current UE status
-        """
-        return self.send_query("DGIPV6?")
-
-    @gateway_ipv6addr.setter
-    def gateway_ipv6addr(self, ipv6_addr):
-        """ sets the IPv6 address of the default gateway
-        Args:
-            ipv6_addr: IPv6 address of the default gateway
-
-        Returns:
-            None
-        """
-        cmd = "DGIPV6 " + ipv6_addr
-        self.send_command(cmd)
-
-    @property
-    def usim_key(self):
-        """ Gets the USIM Security Key
-
-        Args:
-          None
-
-        Returns:
-            USIM Security Key
-        """
-        return self.send_query("USIMK?")
-
-    @usim_key.setter
-    def usim_key(self, usimk):
-        """ sets the USIM Security Key
-        Args:
-            usimk: USIM Security Key, eg "000102030405060708090A0B0C0D0E0F"
-
-        Returns:
-            None
-        """
-        cmd = "USIMK " + usimk
-        self.send_command(cmd)
-
-    def get_ue_status(self):
-        """ Gets the current UE status on Anritsu
-
-        Args:
-          None
-
-        Returns:
-            current UE status
-        """
-        UE_STATUS_INDEX = 0
-        ue_status = self.send_query("CALLSTAT?").split(",")[UE_STATUS_INDEX]
-        return _PROCESS_STATES[ue_status]
-
-    def get_packet_status(self):
-        """ Gets the current Packet status on Anritsu
-
-        Args:
-          None
-
-        Returns:
-            current Packet status
-        """
-        PACKET_STATUS_INDEX = 1
-        packet_status = self.send_query("CALLSTAT?").split(
-            ",")[PACKET_STATUS_INDEX]
-        return _PROCESS_STATES[packet_status]
-
-    def disconnect(self):
-        """ Disconnect the Anritsu box from test PC
-
-        Args:
-          None
-
-        Returns:
-            None
-        """
-        # No need to exit the Smart Studio application:
-        # self.close_smartstudio()
-        self._sock.close()
-
-    def machine_reboot(self):
-        """ Reboots the Anritsu Machine
-
-        Args:
-          None
-
-        Returns:
-            None
-        """
-        self.send_command("REBOOT")
-
-    def save_sequence_log(self, fileName):
-        """ Saves the Anritsu Sequence logs to file
-
-        Args:
-          fileName: log file name
-
-        Returns:
-            None
-        """
-        cmd = 'SAVESEQLOG "{}"'.format(fileName)
-        self.send_command(cmd)
-
-    def clear_sequence_log(self):
-        """ Clears the Anritsu Sequence logs
-
-        Args:
-          None
-
-        Returns:
-            None
-        """
-        self.send_command("CLEARSEQLOG")
-
-    def save_message_log(self, fileName):
-        """ Saves the Anritsu Message logs to file
-
-        Args:
-          fileName: log file name
-
-        Returns:
-            None
-        """
-        cmd = 'SAVEMSGLOG "{}"'.format(fileName)
-        self.send_command(cmd)
-
-    def clear_message_log(self):
-        """ Clears the Anritsu Message logs
-
-        Args:
-          None
-
-        Returns:
-            None
-        """
-        self.send_command("CLEARMSGLOG")
-
-    def save_trace_log(self, fileName, fileType, overwrite, start, end):
-        """ Saves the Anritsu Trace logs
-
-        Args:
-          fileName: log file name
-          fileType: file type (BINARY, TEXT, H245, PACKET, CPLABE)
-          overwrite: whether to over write
-          start: starting trace number
-          end: ending trace number
-
-        Returns:
-            None
-        """
-        cmd = 'SAVETRACELOG "{}",{},{},{},{}'.format(fileName, fileType,
-                                                     overwrite, start, end)
-        self.send_command(cmd)
-
-    def send_cmas_lte_wcdma(self, serialNo, messageID, warningMessage):
-        """ Sends a CMAS message
-
-        Args:
-          serialNo: serial number of CMAS message
-          messageID: CMAS message ID
-          warningMessage:  CMAS Warning message
-
-        Returns:
-            None
-        """
-        cmd = ('PWSSENDWM 3GPP,"BtsNo=1&WarningSystem=CMAS&SerialNo={}'
-               '&MessageID={}&wm={}"').format(serialNo, messageID,
-                                              warningMessage)
-        self.send_command(cmd)
-
-    def send_etws_lte_wcdma(self, serialNo, messageID, warningType,
-                            warningMessage, userAlertenable, popUpEnable):
-        """ Sends a ETWS message
-
-        Args:
-          serialNo: serial number of the ETWS message
-          messageID: ETWS message ID
-          warningType: ETWS warning type
-          warningMessage: ETWS warning message
-          userAlertenable: enable/disable the user alert
-          popUpEnable: enable/disable the pop-up notification
-
-        Returns:
-            None
-        """
-        cmd = (
-            'PWSSENDWM 3GPP,"BtsNo=1&WarningSystem=ETWS&SerialNo={}&'
-            'Primary=ON&PrimaryMessageID={}&Secondary=ON&SecondaryMessageID={}'
-            '&WarningType={}&wm={}&UserAlert={}&Popup={}&dcs=0x10&LanguageCode=en"'
-        ).format(serialNo, messageID, messageID, warningType, warningMessage,
-                 userAlertenable, popUpEnable)
-        self.send_command(cmd)
-
-    def send_cmas_etws_cdma1x(self, message_id, service_category, alert_ext,
-                              response_type, severity, urgency, certainty):
-        """ Sends a CMAS/ETWS message on CDMA 1X
-
-        Args:
-          service_category: service category of the alert
-          message_id: message ID
-          alert_ext: warning message text
-
-        Returns:
-            None
-        """
-        cmd = (
-            'PWSSENDWM 3GPP2,"BtsNo=1&ServiceCategory={}&MessageID={}&AlertText={}&'
-            'CharSet=ASCII&ResponseType={}&Severity={}&Urgency={}&Certainty={}"'
-        ).format(service_category, message_id, alert_ext, response_type,
-                 severity, urgency, certainty)
-        self.send_command(cmd)
-
-    @property
-    def csfb_type(self):
-        """ Gets the current CSFB type
-
-        Args:
-            None
-
-        Returns:
-            current CSFB type
-        """
-        return self.send_query("SIMMODELEX? CSFB")
-
-    @csfb_type.setter
-    def csfb_type(self, csfb_type):
-        """ sets the CSFB type
-        Args:
-            csfb_type: CSFB type
-
-        Returns:
-            None
-        """
-        if not isinstance(csfb_type, CsfbType):
-            raise ValueError('The parameter should be of type "CsfbType" ')
-        cmd = "SIMMODELEX CSFB," + csfb_type.value
-        self.send_command(cmd)
-
-    @property
-    def csfb_return_to_eutran(self):
-        """ Gets the current return to EUTRAN status
-
-        Args:
-            None
-
-        Returns:
-            current return to EUTRAN status
-        """
-        return self.send_query("SIMMODELEX? RETEUTRAN")
-
-    @csfb_return_to_eutran.setter
-    def csfb_return_to_eutran(self, enable):
-        """ sets the return to EUTRAN feature
-        Args:
-            enable: enable/disable return to EUTRAN feature
-
-        Returns:
-            None
-        """
-        if not isinstance(enable, ReturnToEUTRAN):
-            raise ValueError(
-                'The parameter should be of type "ReturnToEUTRAN"')
-        cmd = "SIMMODELEX RETEUTRAN," + enable.value
-        self.send_command(cmd)
-
-    def set_packet_preservation(self):
-        """ Set packet state to Preservation
-
-        Args:
-            None
-
-        Returns:
-            None
-        """
-        cmd = "OPERATEPACKET PRESERVATION"
-        self.send_command(cmd)
-
-    def set_packet_dormant(self):
-        """ Set packet state to Dormant
-
-        Args:
-            None
-
-        Returns:
-            None
-        """
-        cmd = "OPERATEPACKET DORMANT"
-        self.send_command(cmd)
-
-    def get_ue_identity(self, identity_type):
-        """ Get the UE identity IMSI, IMEI, IMEISV
-
-        Args:
-            identity_type : IMSI/IMEI/IMEISV
-
-        Returns:
-            IMSI/IMEI/IMEISV value
-        """
-        bts, rat = self.get_camping_cell()
-        if rat == BtsTechnology.LTE.value:
-            identity_request = TriggerMessageIDs.IDENTITY_REQUEST_LTE.value
-            if identity_type == UEIdentityType.IMSI:
-                userdata = IMSI_READ_USERDATA_LTE
-            elif identity_type == UEIdentityType.IMEI:
-                userdata = IMEI_READ_USERDATA_LTE
-            elif identity_type == UEIdentityType.IMEISV:
-                userdata = IMEISV_READ_USERDATA_LTE
-            else:
-                return None
-        elif rat == BtsTechnology.WCDMA.value:
-            identity_request = TriggerMessageIDs.IDENTITY_REQUEST_WCDMA.value
-            if identity_type == UEIdentityType.IMSI:
-                userdata = IMSI_READ_USERDATA_WCDMA
-            elif identity_type == UEIdentityType.IMEI:
-                userdata = IMEI_READ_USERDATA_WCDMA
-            elif identity_type == UEIdentityType.IMEISV:
-                userdata = IMEISV_READ_USERDATA_WCDMA
-            else:
-                return None
-        elif rat == BtsTechnology.GSM.value:
-            identity_request = TriggerMessageIDs.IDENTITY_REQUEST_GSM.value
-            if identity_type == UEIdentityType.IMSI:
-                userdata = IMSI_READ_USERDATA_GSM
-            elif identity_type == UEIdentityType.IMEI:
-                userdata = IMEI_READ_USERDATA_GSM
-            elif identity_type == UEIdentityType.IMEISV:
-                userdata = IMEISV_READ_USERDATA_GSM
-            else:
-                return None
-        else:
-            return None
-
-        self.send_command("TMMESSAGEMODE {},USERDATA".format(identity_request))
-        time.sleep(SETTLING_TIME)
-        self.send_command("TMUSERDATA {}, {}, {}".format(
-            identity_request, userdata, IDENTITY_REQ_DATA_LEN))
-        time.sleep(SETTLING_TIME)
-        self.send_command("TMSENDUSERMSG {}".format(identity_request))
-        time.sleep(WAIT_TIME_IDENTITY_RESPONSE)
-        # Go through sequence log and find the identity response message
-        target = '"{}"'.format(identity_type.value)
-        seqlog = self.send_query("SEQLOG?").split(",")
-        while (target not in seqlog):
-            index = int(seqlog[0]) - 1
-            if index < SEQ_LOG_MESSAGE_START_INDEX:
-                self.log.error("Can not find " + target)
-                return None
-            seqlog = self.send_query("SEQLOG? %d" % index).split(",")
-        return (seqlog[-1])
-
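-    # Illustrative usage sketch (not part of the original class): reading the
-    # IMSI of the camped UE via the trigger-message mechanism above.
-    #
-    #   imsi = anritsu.get_ue_identity(UEIdentityType.IMSI)
-    #   if imsi is None:
-    #       anritsu.log.warning("Identity response not found in SEQLOG")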
-    def trigger_ue_capability_enquiry(self, requested_bands):
-        """ Triggers LTE RRC UE capability enquiry from callbox.
-
-        Args:
-            requested_bands: User data in hex format
-        """
-        self.set_trigger_message_mode(TriggerMessageIDs.UE_CAPABILITY_ENQUIRY)
-        time.sleep(SETTLING_TIME)
-        self.set_data_of_trigger_message(
-            TriggerMessageIDs.UE_CAPABILITY_ENQUIRY, requested_bands)
-        time.sleep(SETTLING_TIME)
-        self.send_trigger_message(TriggerMessageIDs.UE_CAPABILITY_ENQUIRY)
-        time.sleep(SETTLING_TIME)
-
-    def get_measured_pusch_power(self):
-        """ Queries the PUSCH power.
-
-        Returns:
-            A string indicating PUSCH power in each input port.
-        """
-        return self.send_query("MONITOR? UL_PUSCH")
-
-    def select_usim(self, usim):
-        """ Select pre-defined Anritsu USIM models
-
-        Args:
-            usim: any of P0035Bx, P0135Ax, P0250Ax, P0260Ax
-
-        Returns:
-            None
-        """
-        cmd = "SELECTUSIM {}".format(usim)
-        self.send_command(cmd)
-
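-    # Illustrative usage sketch (not part of the original class): a typical
-    # teardown sequence once a test session is finished.
-    #
-    #   anritsu.stop_simulation()
-    #   anritsu.disconnect()   # closes the control socket only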
-
-class _AnritsuTestCases(object):
-    '''Class to interact with the MD8475 supported test procedures '''
-    def __init__(self, anritsu):
-        self._anritsu = anritsu
-        self.log = anritsu.log
-
-    @property
-    def procedure(self):
-        """ Gets the current Test Procedure type
-
-        Args:
-            None
-
-        Returns:
-            One of TestProcedure type values
-        """
-        return self._anritsu.send_query("TESTPROCEDURE?")
-
-    @procedure.setter
-    def procedure(self, procedure):
-        """ sets the Test Procedure type
-        Args:
-            procedure: One of TestProcedure type values
-
-        Returns:
-            None
-        """
-        if not isinstance(procedure, TestProcedure):
-            raise ValueError(
-                'The parameter should be of type "TestProcedure" ')
-        cmd = "TESTPROCEDURE " + procedure.value
-        self._anritsu.send_command(cmd)
-
-    @property
-    def bts_direction(self):
-        """ Gets the current Test direction
-
-         Args:
-            None
-
-        Returns:
-            Current Test direction eg:BTS2,BTS1
-        """
-        return self._anritsu.send_query("TESTBTSDIRECTION?")
-
-    @bts_direction.setter
-    def bts_direction(self, direction):
-        """ sets the Test direction  eg: BTS1 to BTS2 '''
-
-        Args:
-            direction: tuple (from-bts,to_bts) of type BtsNumber
-
-        Returns:
-            None
-        """
-        if not isinstance(direction, tuple) or len(direction) != 2:
-            raise ValueError("Pass a tuple with two items")
-        from_bts, to_bts = direction
-        if (isinstance(from_bts, BtsNumber) and isinstance(to_bts, BtsNumber)):
-            cmd = "TESTBTSDIRECTION {},{}".format(from_bts.value, to_bts.value)
-            self._anritsu.send_command(cmd)
-        else:
-            raise ValueError(' The parameters should be of type "BtsNumber" ')
-
-    @property
-    def registration_timeout(self):
-        """ Gets the current Test registration timeout
-
-        Args:
-            None
-
-        Returns:
-            Current test registration timeout value
-        """
-        return self._anritsu.send_query("TESTREGISTRATIONTIMEOUT?")
-
-    @registration_timeout.setter
-    def registration_timeout(self, timeout_value):
-        """ sets the Test registration timeout value
-        Args:
-            timeout_value: test registration timeout value
-
-        Returns:
-            None
-        """
-        cmd = "TESTREGISTRATIONTIMEOUT " + str(timeout_value)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def power_control(self):
-        """ Gets the power control enabled/disabled status for test case
-
-        Args:
-            None
-
-        Returns:
-            current power control enabled/disabled status
-        """
-        return self._anritsu.send_query("TESTPOWERCONTROL?")
-
-    @power_control.setter
-    def power_control(self, enable):
-        """ Sets the power control enabled/disabled status for test case
-
-        Args:
-            enable:  enabled/disabled
-
-        Returns:
-            None
-        """
-        if not isinstance(enable, TestPowerControl):
-            raise ValueError(' The parameter should be of type'
-                             ' "TestPowerControl" ')
-        cmd = "TESTPOWERCONTROL " + enable.value
-        self._anritsu.send_command(cmd)
-
-    @property
-    def measurement_LTE(self):
-        """ Checks measurement status for LTE test case
-
-        Args:
-            None
-
-        Returns:
-            Enabled/Disabled
-        """
-        return self._anritsu.send_query("TESTMEASUREMENT? LTE")
-
-    @measurement_LTE.setter
-    def measurement_LTE(self, enable):
-        """ Sets the measurement enabled/disabled status for LTE test case
-
-        Args:
-            enable:  enabled/disabled
-
-        Returns:
-            None
-        """
-        if not isinstance(enable, TestMeasurement):
-            raise ValueError(' The parameter should be of type'
-                             ' "TestMeasurement" ')
-        cmd = "TESTMEASUREMENT LTE," + enable.value
-        self._anritsu.send_command(cmd)
-
-    @property
-    def measurement_WCDMA(self):
-        """ Checks measurement status for WCDMA test case
-
-        Args:
-            None
-
-        Returns:
-            Enabled/Disabled
-        """
-        return self._anritsu.send_query("TESTMEASUREMENT? WCDMA")
-
-    @measurement_WCDMA.setter
-    def measurement_WCDMA(self, enable):
-        """ Sets the measurement enabled/disabled status for WCDMA test case
-
-        Args:
-            enable:  enabled/disabled
-
-        Returns:
-            None
-        """
-        if not isinstance(enable, TestMeasurement):
-            raise ValueError(' The parameter should be of type'
-                             ' "TestMeasurement" ')
-        cmd = "TESTMEASUREMENT WCDMA," + enable.value
-        self._anritsu.send_command(cmd)
-
-    @property
-    def measurement_TDSCDMA(self):
-        """ Checks measurement status for TDSCDMA test case
-
-        Args:
-            None
-
-        Returns:
-            Enabled/Disabled
-        """
-        return self._anritsu.send_query("TESTMEASUREMENT? TDSCDMA")
-
-    @measurement_TDSCDMA.setter
-    def measurement_TDSCDMA(self, enable):
-        """ Sets the measurement enabled/disabled status for TDSCDMA test case
-
-        Args:
-            enable:  enabled/disabled
-
-        Returns:
-            None
-        """
-        if not isinstance(enable, TestMeasurement):
-            raise ValueError(' The parameter should be of type'
-                             ' "TestMeasurement" ')
-        cmd = "TESTMEASUREMENT TDSCDMA," + enable.value
-        self._anritsu.send_command(cmd)
-
-    def set_pdn_targeteps(self, pdn_order, pdn_number=1):
-        """ Sets PDN to connect as a target when performing the
-           test case for packet handover
-
-        Args:
-            pdn_order:  PRIORITY/USER
-            pdn_number: Target PDN number
-
-        Returns:
-            None
-        """
-        cmd = "TESTPDNTARGETEPS " + pdn_order
-        if pdn_order == "USER":
-            cmd = cmd + "," + str(pdn_number)
-        self._anritsu.send_command(cmd)
-
-
-class _BaseTransceiverStation(object):
-    '''Class to interact different BTS supported by MD8475 '''
-    def __init__(self, anritsu, btsnumber):
-        if not isinstance(btsnumber, BtsNumber):
-            raise ValueError(' The parameter should be of type "BtsNumber" ')
-        self._bts_number = btsnumber.value
-        self._anritsu = anritsu
-        self.log = anritsu.log
-
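# Usage sketch: constructing a BTS handle, assuming `anritsu` is an MD8475
# session object providing send_command/send_query and that BtsNumber has a
# BTS1 member (assumed name).
def _example_get_bts(anritsu):
    return _BaseTransceiverStation(anritsu, BtsNumber.BTS1)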
-    @property
-    def output_level(self):
-        """ Gets the Downlink power of the cell
-
-        Args:
-            None
-
-        Returns:
-            DL Power level
-        """
-        cmd = "OLVL? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @output_level.setter
-    def output_level(self, level):
-        """ Sets the Downlink power of the cell
-
-        Args:
-            level: Power level
-
-        Returns:
-            None
-        """
-        counter = 1
-        while float(level) != float(self.output_level):
-            if counter > 3:
-                raise AnritsuError("Fail to set output level in 3 tries!")
-            cmd = "OLVL {},{}".format(level, self._bts_number)
-            self._anritsu.send_command(cmd)
-            counter += 1
-            time.sleep(1)
-
-    @property
-    def input_level(self):
-        """ Gets the reference power of the cell
-
-        Args:
-            None
-
-        Returns:
-            Reference Power level
-        """
-        cmd = "RFLVL? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @input_level.setter
-    def input_level(self, level):
-        """ Sets the reference power of the cell
-
-        Args:
-            level: Power level
-
-        Returns:
-            None
-        """
-        counter = 1
-        while float(level) != float(self.input_level):
-            if counter > 3:
-                raise AnritsuError("Fail to set intput level in 3 tries!")
-            cmd = "RFLVL {},{}".format(level, self._bts_number)
-            self._anritsu.send_command(cmd)
-            counter += 1
-            time.sleep(1)
-
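# Usage sketch: both power setters above retry the write up to three times,
# re-querying until the readback matches, and raise AnritsuError otherwise.
# Assumes `bts` is a _BaseTransceiverStation instance; the dBm values are
# illustrative only.
def _example_set_cell_power(bts):
    bts.output_level = -50  # DL power, "OLVL <level>,<bts>"
    bts.input_level = -10   # UL reference power, "RFLVL <level>,<bts>"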
-    @property
-    def band(self):
-        """ Gets the Band of the cell
-
-        Args:
-            None
-
-        Returns:
-            Cell band
-        """
-        cmd = "BAND? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @band.setter
-    def band(self, band):
-        """ Sets the Band of the cell
-
-        Args:
-            band: Band of the cell
-
-        Returns:
-            None
-        """
-        cmd = "BAND {},{}".format(band, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def transmode(self):
-        """ Gets the Transmission Mode of the cell
-
-        Args:
-            None
-
-        Returns:
-            Transmission mode
-        """
-        cmd = "TRANSMODE? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @transmode.setter
-    def transmode(self, tm_mode):
-        """ Sets the TM of the cell
-
-        Args:
-            TM: TM of the cell
-
-        Returns:
-            None
-        """
-        cmd = "TRANSMODE {},{}".format(tm_mode, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def duplex_mode(self):
-        """ Gets the Duplex Mode of the cell
-
-        Args:
-            None
-
-        Returns:
-            Duplex mode
-        """
-        cmd = "DUPLEXMODE? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @duplex_mode.setter
-    def duplex_mode(self, mode):
-        """ Sets the duplex mode for the cell
-
-        Args:
-            mode: string indicating FDD or TDD
-
-        Returns:
-            None
-        """
-        cmd = "DUPLEXMODE {},{}".format(mode, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def uldl_configuration(self):
-        """ Gets the UL/DL pattern configuration for TDD bands
-
-        Args:
-            None
-
-        Returns:
-            Configuration number
-        """
-        cmd = "ULDLCONFIGURATION? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @uldl_configuration.setter
-    def uldl_configuration(self, configuration):
-        """ Sets the UL/DL pattern configuration for TDD bands
-
-        Args:
-            configuration: configuration number, [ 0, 6 ] inclusive
-
-        Returns:
-            None
-
-        Raises:
-            ValueError: Frame structure has to be [ 0, 6 ] inclusive
-        """
-        if configuration not in range(0, 7):
-            raise ValueError("The frame structure configuration has to be a "
-                             "number between 0 and 6 inclusive")
-
-        cmd = "ULDLCONFIGURATION {},{}".format(configuration, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def cfi(self):
-        """ Gets the Control Format Indicator for this base station.
-
-        Args:
-            None
-
-        Returns:
-            The CFI number.
-        """
-        cmd = "CFI? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @cfi.setter
-    def cfi(self, cfi):
-        """ Sets the Control Format Indicator for this base station.
-
-        Args:
-            cfi: one of BESTEFFORT, AUTO, 1, 2 or 3.
-
-        Returns:
-            None
-
-        Raises:
-            ValueError: if cfi's value is invalid
-        """
-
-        cfi = str(cfi)
-
-        valid_values = {'BESTEFFORT', 'AUTO', '1', '2', '3'}
-        if cfi not in valid_values:
-            raise ValueError('Valid values for CFI are %r' % valid_values)
-
-        cmd = "CFI {},{}".format(cfi, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def paging_duration(self):
-        """ Gets the paging cycle duration for this base station.
-
-        Args:
-            None
-
-        Returns:
-            The paging cycle duration in milliseconds.
-        """
-        cmd = "PCYCLE? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @paging_duration.setter
-    def paging_duration(self, duration):
-        """ Sets the paging cycle duration for this base station.
-
-        Args:
-            duration: the paging cycle duration in milliseconds.
-
-        Returns:
-            None
-
-        Raises:
-            ValueError: if duration's value is invalid
-        """
-
-        duration = int(duration)
-
-        valid_values = {320, 640, 1280, 2560}
-        if duration not in valid_values:
-            raise ValueError('Valid values for the paging cycle duration are '
-                             '%r.' % valid_values)
-
-        cmd = "PCYCLE {},{}".format(duration, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def phich_resource(self):
-        """ Gets the PHICH Resource setting for this base station.
-
-        Args:
-            None
-
-        Returns:
-            The PHICH Resource setting.
-        """
-        cmd = "PHICHRESOURCE? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @phich_resource.setter
-    def phich_resource(self, phich):
-        """ Sets the PHICH Resource setting for this base station.
-
-        Args:
-            phich: one of 1/6, 1/2, 1, 2.
-
-        Returns:
-            None
-
-        Raises:
-            ValueError: if phich's value is invalid
-        """
-
-        phich = str(phich)
-
-        valid_values = ['1/6', '1/2', '1', '2']
-        if phich not in valid_values:
-            raise ValueError('Valid values for PHICH Resource are %r' %
-                             valid_values)
-
-        cmd = "PHICHRESOURCE {},{}".format(phich, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def tdd_special_subframe(self):
-        """ Gets SPECIALSUBFRAME of cell.
-
-        Args:
-            None
-
-        Returns:
-            tdd_special_subframe: integer between 0 and 9, inclusive
-        """
-        cmd = "SPECIALSUBFRAME? " + self._bts_number
-        tdd_special_subframe = int(self._anritsu.send_query(cmd))
-        return tdd_special_subframe
-
-    @tdd_special_subframe.setter
-    def tdd_special_subframe(self, tdd_special_subframe):
-        """ Sets SPECIALSUBFRAME of cell.
-
-        Args:
-            tdd_special_subframe: int between 0 and 9, inclusive
-
-        Returns:
-            None
-
-        Raises:
-            ValueError: tdd_special_subframe has to be between 0 and 9, inclusive
-        """
-        if tdd_special_subframe not in range(0, 10):
-            raise ValueError("The special subframe config is not [0,9]")
-        cmd = "SPECIALSUBFRAME {},{}".format(tdd_special_subframe,
-                                             self._bts_number)
-        self._anritsu.send_command(cmd)
-
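# Usage sketch: configuring a TDD cell, assuming `bts` is a
# _BaseTransceiverStation instance on a TDD band. Both setters validate their
# ranges ([0, 6] for the UL/DL pattern, [0, 9] for the special subframe)
# before sending the command; the values below are illustrative only.
def _example_configure_tdd(bts):
    bts.duplex_mode = "TDD"
    bts.uldl_configuration = 2    # "ULDLCONFIGURATION 2,<bts>"
    bts.tdd_special_subframe = 7  # "SPECIALSUBFRAME 7,<bts>"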
-    @property
-    def dl_antenna(self):
-        """ Gets the DL ANTENNA count of the cell
-
-        Args:
-            None
-
-        Returns:
-            Number of DL antennas
-        """
-        cmd = "ANTENNAS? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @dl_antenna.setter
-    def dl_antenna(self, num_antenna):
-        """ Sets the DL ANTENNA of the cell
-
-        Args:
-            c: DL ANTENNA of the cell
-
-        Returns:
-            None
-        """
-        cmd = "ANTENNAS {},{}".format(num_antenna, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def bandwidth(self):
-        """ Gets the channel bandwidth of the cell
-
-        Args:
-            None
-
-        Returns:
-            channel bandwidth
-        """
-        cmd = "BANDWIDTH? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @bandwidth.setter
-    def bandwidth(self, bandwidth):
-        """ Sets the channel bandwidth of the cell
-
-        Args:
-            bandwidth: channel bandwidth of the cell
-
-        Returns:
-            None
-        """
-        if not isinstance(bandwidth, BtsBandwidth):
-            raise ValueError(' The parameter should be of type "BtsBandwidth"')
-        cmd = "BANDWIDTH {},{}".format(bandwidth.value, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def dl_bandwidth(self):
-        """ Gets the downlink bandwidth of the cell
-
-        Args:
-            None
-
-        Returns:
-            downlink bandwidth
-        """
-        cmd = "DLBANDWIDTH? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @dl_bandwidth.setter
-    def dl_bandwidth(self, bandwidth):
-        """ Sets the downlink bandwidth of the cell
-
-        Args:
-            bandwidth: downlink bandwidth of the cell
-
-        Returns:
-            None
-        """
-        if not isinstance(bandwidth, BtsBandwidth):
-            raise ValueError(' The parameter should be of type "BtsBandwidth"')
-        cmd = "DLBANDWIDTH {},{}".format(bandwidth.value, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def ul_bandwidth(self):
-        """ Gets the uplink bandwidth of the cell
-
-        Args:
-            None
-
-        Returns:
-            uplink bandwidth
-        """
-        cmd = "ULBANDWIDTH? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @ul_bandwidth.setter
-    def ul_bandwidth(self, bandwidth):
-        """ Sets the uplink bandwidth of the cell
-
-        Args:
-            bandwidth: uplink bandwidth of the cell
-
-        Returns:
-            None
-        """
-        if not isinstance(bandwidth, BtsBandwidth):
-            raise ValueError(
-                ' The parameter should be of type "BtsBandwidth" ')
-        cmd = "ULBANDWIDTH {},{}".format(bandwidth.value, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def packet_rate(self):
-        """ Gets the packet rate of the cell
-
-        Args:
-            None
-
-        Returns:
-            packet rate
-        """
-        cmd = "PACKETRATE? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @packet_rate.setter
-    def packet_rate(self, packetrate):
-        """ Sets the packet rate of the cell
-
-        Args:
-            packetrate: packet rate of the cell
-
-        Returns:
-            None
-        """
-        if not isinstance(packetrate, BtsPacketRate):
-            raise ValueError(' The parameter should be of type'
-                             ' "BtsPacketRate" ')
-        cmd = "PACKETRATE {},{}".format(packetrate.value, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def ul_windowsize(self):
-        """ Gets the uplink window size of the cell
-
-        Args:
-            None
-
-        Returns:
-            uplink window size
-        """
-        cmd = "ULWINSIZE? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @ul_windowsize.setter
-    def ul_windowsize(self, windowsize):
-        """ Sets the uplink window size of the cell
-
-        Args:
-            windowsize: uplink window size of the cell
-
-        Returns:
-            None
-        """
-        if not isinstance(windowsize, BtsPacketWindowSize):
-            raise ValueError(' The parameter should be of type'
-                             ' "BtsPacketWindowSize" ')
-        cmd = "ULWINSIZE {},{}".format(windowsize.value, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def dl_windowsize(self):
-        """ Gets the downlink window size of the cell
-
-        Args:
-            None
-
-        Returns:
-            downlink window size
-        """
-        cmd = "DLWINSIZE? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @dl_windowsize.setter
-    def dl_windowsize(self, windowsize):
-        """ Sets the downlink window size of the cell
-
-        Args:
-            windowsize: downlink window size of the cell
-
-        Returns:
-            None
-        """
-        if not isinstance(windowsize, BtsPacketWindowSize):
-            raise ValueError(' The parameter should be of type'
-                             ' "BtsPacketWindowSize" ')
-        cmd = "DLWINSIZE {},{}".format(windowsize.value, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def service_state(self):
-        """ Gets the service state of BTS
-
-        Args:
-            None
-
-        Returns:
-            service state IN/OUT
-        """
-        cmd = "OUTOFSERVICE? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @service_state.setter
-    def service_state(self, service_state):
-        """ Sets the service state of BTS
-
-        Args:
-            service_state: service state of BTS , IN/OUT
-
-        Returns:
-            None
-        """
-        if not isinstance(service_state, BtsServiceState):
-            raise ValueError(' The parameter should be of type'
-                             ' "BtsServiceState" ')
-        cmd = "OUTOFSERVICE {},{}".format(service_state.value,
-                                          self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def cell_barred(self):
-        """ Gets the Cell Barred state of the cell
-
-        Args:
-            None
-
-        Returns:
-            one of BtsCellBarred value
-        """
-        cmd = "CELLBARRED?" + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @cell_barred.setter
-    def cell_barred(self, barred_option):
-        """ Sets the Cell Barred state of the cell
-
-        Args:
-            barred_option: Cell Barred state of the cell
-
-        Returns:
-            None
-        """
-        if not isinstance(barred_option, BtsCellBarred):
-            raise ValueError(' The parameter should be of type'
-                             ' "BtsCellBarred" ')
-        cmd = "CELLBARRED {},{}".format(barred_option.value, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def accessclass_barred(self):
-        """ Gets the Access Class Barred state of the cell
-
-        Args:
-            None
-
-        Returns:
-            one of BtsAccessClassBarred value
-        """
-        cmd = "ACBARRED? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @accessclass_barred.setter
-    def accessclass_barred(self, barred_option):
-        """ Sets the Access Class Barred state of the cell
-
-        Args:
-            barred_option: Access Class Barred state of the cell
-
-        Returns:
-            None
-        """
-        if not isinstance(barred_option, BtsAccessClassBarred):
-            raise ValueError(' The parameter should be of type'
-                             ' "BtsAccessClassBarred" ')
-        cmd = "ACBARRED {},{}".format(barred_option.value, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def lteemergency_ac_barred(self):
-        """ Gets the LTE emergency Access Class Barred state of the cell
-
-        Args:
-            None
-
-        Returns:
-            one of BtsLteEmergencyAccessClassBarred value
-        """
-        cmd = "LTEEMERGENCYACBARRED? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @lteemergency_ac_barred.setter
-    def lteemergency_ac_barred(self, barred_option):
-        """ Sets the LTE emergency Access Class Barred state of the cell
-
-        Args:
-            barred_option: Access Class Barred state of the cell
-
-        Returns:
-            None
-        """
-        if not isinstance(barred_option, BtsLteEmergencyAccessClassBarred):
-            raise ValueError(' The parameter should be of type'
-                             ' "BtsLteEmergencyAccessClassBarred" ')
-        cmd = "LTEEMERGENCYACBARRED {},{}".format(barred_option.value,
-                                                  self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def mcc(self):
-        """ Gets the MCC of the cell
-
-        Args:
-            None
-
-        Returns:
-            MCC of the cell
-        """
-        cmd = "MCC? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @mcc.setter
-    def mcc(self, mcc_code):
-        """ Sets the MCC of the cell
-
-        Args:
-            mcc_code: MCC of the cell
-
-        Returns:
-            None
-        """
-        cmd = "MCC {},{}".format(mcc_code, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def mnc(self):
-        """ Gets the MNC of the cell
-
-        Args:
-            None
-
-        Returns:
-            MNC of the cell
-        """
-        cmd = "MNC? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @mnc.setter
-    def mnc(self, mnc_code):
-        """ Sets the MNC of the cell
-
-        Args:
-            mnc_code: MNC of the cell
-
-        Returns:
-            None
-        """
-        cmd = "MNC {},{}".format(mnc_code, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def nw_fullname_enable(self):
-        """ Gets the network full name enable status
-
-        Args:
-            None
-
-        Returns:
-            one of BtsNwNameEnable value
-        """
-        cmd = "NWFNAMEON? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @nw_fullname_enable.setter
-    def nw_fullname_enable(self, enable):
-        """ Sets the network full name enable status
-
-        Args:
-            enable: network full name enable status
-
-        Returns:
-            None
-        """
-        if not isinstance(enable, BtsNwNameEnable):
-            raise ValueError(' The parameter should be of type'
-                             ' "BtsNwNameEnable" ')
-        cmd = "NWFNAMEON {},{}".format(enable.value, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def nw_fullname(self):
-        """ Gets the network full name
-
-        Args:
-            None
-
-        Returns:
-            Network full name
-        """
-        cmd = "NWFNAME? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @nw_fullname.setter
-    def nw_fullname(self, fullname):
-        """ Sets the network full name
-
-        Args:
-            fullname: network full name
-
-        Returns:
-            None
-        """
-        cmd = "NWFNAME {},{}".format(fullname, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def nw_shortname_enable(self):
-        """ Gets the network short name enable status
-
-        Args:
-            None
-
-        Returns:
-            one of BtsNwNameEnable value
-        """
-        cmd = "NWSNAMEON? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @nw_shortname_enable.setter
-    def nw_shortname_enable(self, enable):
-        """ Sets the network short name enable status
-
-        Args:
-            enable: network short name enable status
-
-        Returns:
-            None
-        """
-        if not isinstance(enable, BtsNwNameEnable):
-            raise ValueError(' The parameter should be of type'
-                             ' "BtsNwNameEnable" ')
-        cmd = "NWSNAMEON {},{}".format(enable.value, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def nw_shortname(self):
-        """ Gets the network short name
-
-        Args:
-            None
-
-        Returns:
-            Network short name
-        """
-        cmd = "NWSNAME? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @nw_shortname.setter
-    def nw_shortname(self, shortname):
-        """ Sets the network short name
-
-        Args:
-            shortname: network short name
-
-        Returns:
-            None
-        """
-        cmd = "NWSNAME {},{}".format(shortname, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    def apply_parameter_changes(self):
-        """ apply the parameter changes at run time
-
-        Args:
-            None
-
-        Returns:
-            None
-        """
-        cmd = "APPLYPARAM"
-        self._anritsu.send_command(cmd)
-
-    @property
-    def wcdma_ctch(self):
-        """ Gets the WCDMA CTCH enable/disable status
-
-        Args:
-            None
-
-        Returns:
-            one of CTCHSetup values
-        """
-        cmd = "CTCHPARAMSETUP? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @wcdma_ctch.setter
-    def wcdma_ctch(self, enable):
-        """ Sets the WCDMA CTCH enable/disable status
-
-        Args:
-            enable: WCDMA CTCH enable/disable status
-
-        Returns:
-            None
-        """
-        cmd = "CTCHPARAMSETUP {},{}".format(enable.value, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def lac(self):
-        """ Gets the Location Area Code of the cell
-
-        Args:
-            None
-
-        Returns:
-            LAC value
-        """
-        cmd = "LAC? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @lac.setter
-    def lac(self, lac):
-        """ Sets the Location Area Code of the cell
-
-        Args:
-            lac: Location Area Code of the cell
-
-        Returns:
-            None
-        """
-        cmd = "LAC {},{}".format(lac, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def rac(self):
-        """ Gets the Routing Area Code of the cell
-
-        Args:
-            None
-
-        Returns:
-            RAC value
-        """
-        cmd = "RAC? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @rac.setter
-    def rac(self, rac):
-        """ Sets the Routing Area Code of the cell
-
-        Args:
-            rac: Routing Area Code of the cell
-
-        Returns:
-            None
-        """
-        cmd = "RAC {},{}".format(rac, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def dl_channel(self):
-        """ Gets the downlink channel number of the cell
-
-        Args:
-            None
-
-        Returns:
-            downlink channel number of the cell
-        """
-        cmd = "DLCHAN? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @dl_channel.setter
-    def dl_channel(self, channel):
-        """ Sets the downlink channel number of the cell
-
-        Args:
-            channel: downlink channel number of the cell
-
-        Returns:
-            None
-        """
-        cmd = "DLCHAN {},{}".format(channel, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def dl_cc_enabled(self):
-        """ Checks if component carrier is enabled or disabled
-
-        Args:
-            None
-
-        Returns:
-            True if enabled, False if disabled
-        """
-        return (self._anritsu.send_query("TESTDLCC?" +
-                                         self._bts_number) == "ENABLE")
-
-    @dl_cc_enabled.setter
-    def dl_cc_enabled(self, enabled):
-        """ Enables or disables the component carrier
-
-        Args:
-            enabled: True if it should be enabled, False if disabled
-
-        Returns:
-            None
-        """
-        cmd = "TESTDLCC {},{}".format("ENABLE" if enabled else "DISABLE",
-                                      self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def sector1_mcc(self):
-        """ Gets the sector 1 MCC of the CDMA cell
-
-        Args:
-            None
-
-        Returns:
-            sector 1 mcc
-        """
-        cmd = "S1MCC? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @sector1_mcc.setter
-    def sector1_mcc(self, mcc):
-        """ Sets the sector 1 MCC of the CDMA cell
-
-        Args:
-            mcc: sector 1 MCC of the CDMA cell
-
-        Returns:
-            None
-        """
-        cmd = "S1MCC {},{}".format(mcc, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def sector1_sid(self):
-        """ Gets the sector 1 system ID of the CDMA cell
-
-        Args:
-            None
-
-        Returns:
-            sector 1 system Id
-        """
-        cmd = "S1SID? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @sector1_sid.setter
-    def sector1_sid(self, sid):
-        """ Sets the sector 1 system ID of the CDMA cell
-
-        Args:
-            sid: sector 1 system ID of the CDMA cell
-
-        Returns:
-            None
-        """
-        cmd = "S1SID {},{}".format(sid, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def sector1_nid(self):
-        """ Gets the sector 1 network ID of the CDMA cell
-
-        Args:
-            None
-
-        Returns:
-            sector 1 network Id
-        """
-        cmd = "S1NID? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @sector1_nid.setter
-    def sector1_nid(self, nid):
-        """ Sets the sector 1 network ID of the CDMA cell
-
-        Args:
-            nid: sector 1 network ID of the CDMA cell
-
-        Returns:
-            None
-        """
-        cmd = "S1NID {},{}".format(nid, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def sector1_baseid(self):
-        """ Gets the sector 1 Base ID of the CDMA cell
-
-        Args:
-            None
-
-        Returns:
-            sector 1 Base Id
-        """
-        cmd = "S1BASEID? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @sector1_baseid.setter
-    def sector1_baseid(self, baseid):
-        """ Sets the sector 1 Base ID of the CDMA cell
-
-        Args:
-            baseid: sector 1 Base ID of the CDMA cell
-
-        Returns:
-            None
-        """
-        cmd = "S1BASEID {},{}".format(baseid, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def sector1_latitude(self):
-        """ Gets the sector 1 latitude of the CDMA cell
-
-        Args:
-            None
-
-        Returns:
-            sector 1 latitude
-        """
-        cmd = "S1LATITUDE? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @sector1_latitude.setter
-    def sector1_latitude(self, latitude):
-        """ Sets the sector 1 latitude of the CDMA cell
-
-        Args:
-            latitude: sector 1 latitude of the CDMA cell
-
-        Returns:
-            None
-        """
-        cmd = "S1LATITUDE {},{}".format(latitude, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def sector1_longitude(self):
-        """ Gets the sector 1 longitude of the CDMA cell
-
-        Args:
-            None
-
-        Returns:
-            sector 1 longitude
-        """
-        cmd = "S1LONGITUDE? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @sector1_longitude.setter
-    def sector1_longitude(self, longitude):
-        """ Sets the sector 1 longitude of the CDMA cell
-
-        Args:
-            longitude: sector 1 longitude of the CDMA cell
-
-        Returns:
-            None
-        """
-        cmd = "S1LONGITUDE {},{}".format(longitude, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def evdo_sid(self):
-        """ Gets the Sector ID of the EVDO cell
-
-        Args:
-            None
-
-        Returns:
-            Sector Id
-        """
-        cmd = "S1SECTORID? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @evdo_sid.setter
-    def evdo_sid(self, sid):
-        """ Sets the Sector ID of the EVDO cell
-
-        Args:
-            sid: Sector ID of the EVDO cell
-
-        Returns:
-            None
-        """
-        cmd = "S1SECTORID {},{}".format(sid, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def cell_id(self):
-        """ Gets the cell identity of the cell
-
-        Args:
-            None
-
-        Returns:
-            cell identity
-        """
-        cmd = "CELLID? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @cell_id.setter
-    def cell_id(self, cell_id):
-        """ Sets the cell identity of the cell
-
-        Args:
-            cell_id: cell identity of the cell
-
-        Returns:
-            None
-        """
-        cmd = "CELLID {},{}".format(cell_id, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def physical_cellid(self):
-        """ Gets the physical cell id of the cell
-
-        Args:
-            None
-
-        Returns:
-            physical cell id
-        """
-        cmd = "PHYCELLID? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @physical_cellid.setter
-    def physical_cellid(self, physical_cellid):
-        """ Sets the physical cell id of the cell
-
-        Args:
-            physical_cellid: physical cell id of the cell
-
-        Returns:
-            None
-        """
-        cmd = "PHYCELLID {},{}".format(physical_cellid, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def gsm_mcs_dl(self):
-        """ Gets the Modulation and Coding scheme (DL) of the GSM cell
-
-        Args:
-            None
-
-        Returns:
-            DL MCS
-        """
-        cmd = "DLMCS? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @gsm_mcs_dl.setter
-    def gsm_mcs_dl(self, mcs_dl):
-        """ Sets the Modulation and Coding scheme (DL) of the GSM cell
-
-        Args:
-            mcs_dl: Modulation and Coding scheme (DL) of the GSM cell
-
-        Returns:
-            None
-        """
-        cmd = "DLMCS {},{}".format(mcs_dl, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def gsm_mcs_ul(self):
-        """ Gets the Modulation and Coding scheme (UL) of the GSM cell
-
-        Args:
-            None
-
-        Returns:
-            UL MCS
-        """
-        cmd = "ULMCS? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @gsm_mcs_ul.setter
-    def gsm_mcs_ul(self, mcs_ul):
-        """ Sets the Modulation and Coding scheme (UL) of the GSM cell
-
-        Args:
-            mcs_ul: Modulation and Coding scheme (UL) of the GSM cell
-
-        Returns:
-            None
-        """
-        cmd = "ULMCS {},{}".format(mcs_ul, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def lte_scheduling_mode(self):
-        """ Gets the Scheduling mode of the LTE cell
-
-        Args:
-            None
-
-        Returns:
-            Scheduling mode
-        """
-        cmd = "SCHEDULEMODE? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @lte_scheduling_mode.setter
-    def lte_scheduling_mode(self, mode):
-        """ Sets the Scheduling mode of the LTE cell
-
-        Args:
-            mode: STATIC (default) or DYNAMIC
-
-        Returns:
-            None
-        """
-        counter = 1
-        while mode != self.lte_scheduling_mode:
-            if counter > 3:
-                raise AnritsuError("Fail to set scheduling mode in 3 tries!")
-            cmd = "SCHEDULEMODE {},{}".format(mode, self._bts_number)
-            self._anritsu.send_command(cmd)
-            counter += 1
-            time.sleep(1)
-
-    @property
-    def tbs_pattern(self):
-        """ Gets the TBS Pattern setting for the LTE cell
-
-        Args:
-            None
-
-        Returns:
-            TBS Pattern setting
-        """
-        cmd = "TBSPATTERN? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @tbs_pattern.setter
-    def tbs_pattern(self, pattern):
-        """ Sets the TBS Pattern setting for the LTE cell
-
-        Args:
-            mode: "FULLALLOCATION" or "OFF"
-
-        Returns:
-            None
-        """
-        cmd = "TBSPATTERN {}, {}".format(pattern, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def drx_connected_mode(self):
-        """ Gets the Connected DRX LTE cell parameter
-
-        Args:
-            None
-
-        Returns:
-            DRX connected mode (OFF, AUTO, MANUAL)
-        """
-        cmd = "DRXCONN? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @drx_connected_mode.setter
-    def drx_connected_mode(self, mode):
-        """  Sets the Connected DRX LTE cell parameter
-
-        Args:
-            mode: OFF, AUTO, MANUAL
-
-        Returns:
-            None
-        """
-        cmd = "DRXCONN {}, {}".format(mode, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def drx_on_duration_timer(self):
-        """ Gets the amount of PDCCH subframes to wait for data after
-            waking up from a DRX cycle
-
-        Args:
-            None
-
-        Returns:
-            DRX mode duration timer
-        """
-        cmd = "DRXDURATIONTIME? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @drx_on_duration_timer.setter
-    def drx_on_duration_timer(self, time):
-        """ Sets the amount of PDCCH subframes to wait for data after
-            waking up from a DRX cycle
-
-        Args:
-            time: Amount of PDCCH subframes to wait for user data
-                to be transmitted
-
-        Returns:
-            None
-        """
-        cmd = "DRXDURATIONTIME PSF{}, {}".format(time, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def drx_inactivity_timer(self):
-        """ Gets the number of PDCCH subframes to wait before entering DRX mode
-
-        Args:
-            None
-
-        Returns:
-            DRX mode inactivity timer
-        """
-        cmd = "DRXINACTIVITYTIME? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @drx_inactivity_timer.setter
-    def drx_inactivity_timer(self, time):
-        """ Sets the number of PDCCH subframes to wait before entering DRX mode
-
-        Args:
-            time: Length of the interval to wait
-
-        Returns:
-            None
-        """
-        cmd = "DRXINACTIVITYTIME PSF{}, {}".format(time, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def drx_retransmission_timer(self):
-        """ Gets the number of consecutive PDCCH subframes to wait
-        for retransmission
-
-        Args:
-            None
-
-        Returns:
-            Number of PDCCH subframes to wait for retransmission
-        """
-        cmd = "DRXRETRANSTIME? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @drx_retransmission_timer.setter
-    def drx_retransmission_timer(self, time):
-        """ Sets the number of consecutive PDCCH subframes to wait
-        for retransmission
-
-        Args:
-            time: Number of PDCCH subframes to wait
-            for retransmission
-
-        Returns:
-            None
-        """
-        cmd = "DRXRETRANSTIME PSF{}, {}".format(time, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def drx_long_cycle(self):
-        """ Gets the amount of subframes representing a DRX long cycle
-
-        Args:
-            None
-
-        Returns:
-            The amount of subframes representing one long DRX cycle.
-            One cycle consists of DRX sleep + DRX on duration
-        """
-        cmd = "DRXLONGCYCLE? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @drx_long_cycle.setter
-    def drx_long_cycle(self, time):
-        """ Sets the amount of subframes representing a DRX long cycle
-
-        Args:
-            time: The amount of subframes representing one long DRX cycle.
-                One cycle consists of DRX sleep + DRX on duration
-
-        Returns:
-            None
-        """
-        cmd = "DRXLONGCYCLE SF{}, {}".format(time, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def drx_long_cycle_offset(self):
-        """ Gets the offset used to determine long cycle starting
-        subframe
-
-        Args:
-            None
-
-        Returns:
-            Long cycle offset
-        """
-        cmd = "DRXLONGCYCLESTARTOFFSET? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @drx_long_cycle_offset.setter
-    def drx_long_cycle_offset(self, offset):
-        """ Sets the offset used to determine long cycle starting
-        subframe
-
-        Args:
-            offset: Number in range 0...(long cycle - 1)
-        """
-        cmd = "DRXLONGCYCLESTARTOFFSET {}, {}".format(offset, self._bts_number)
-        self._anritsu.send_command(cmd)
-
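# Usage sketch: the connected-mode DRX setters above take plain numbers and
# prepend the PSF/SF unit prefixes themselves. Assumes `bts` is a
# _BaseTransceiverStation instance on an LTE cell; the timer values are
# illustrative and must be values the instrument accepts.
def _example_manual_drx(bts):
    bts.drx_connected_mode = "MANUAL"
    bts.drx_on_duration_timer = 10     # "DRXDURATIONTIME PSF10, <bts>"
    bts.drx_inactivity_timer = 200     # "DRXINACTIVITYTIME PSF200, <bts>"
    bts.drx_retransmission_timer = 33  # "DRXRETRANSTIME PSF33, <bts>"
    bts.drx_long_cycle = 320           # "DRXLONGCYCLE SF320, <bts>"
    bts.drx_long_cycle_offset = 0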
-    @property
-    def lte_mcs_dl(self):
-        """ Gets the Modulation and Coding scheme (DL) of the LTE cell
-
-        Args:
-            None
-
-        Returns:
-            DL MCS
-        """
-        cmd = "DLIMCS? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @lte_mcs_dl.setter
-    def lte_mcs_dl(self, mcs_dl):
-        """ Sets the Modulation and Coding scheme (DL) of the LTE cell
-
-        Args:
-            mcs_dl: Modulation and Coding scheme (DL) of the LTE cell
-
-        Returns:
-            None
-        """
-        cmd = "DLIMCS {},{}".format(mcs_dl, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def lte_mcs_ul(self):
-        """ Gets the Modulation and Coding scheme (UL) of the LTE cell
-
-        Args:
-            None
-
-        Returns:
-            UL MCS
-        """
-        cmd = "ULIMCS? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @lte_mcs_ul.setter
-    def lte_mcs_ul(self, mcs_ul):
-        """ Sets the Modulation and Coding scheme (UL) of the LTE cell
-
-        Args:
-            mcs_ul: Modulation and Coding scheme (UL) of the LTE cell
-
-        Returns:
-            None
-        """
-        cmd = "ULIMCS {},{}".format(mcs_ul, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def lte_dl_modulation_order(self):
-        """ Gets the DL modulation order of the LTE cell
-
-        Args:
-            None
-
-        Returns:
-            The DL modulation order
-        """
-        cmd = "DLRMC_MOD? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @lte_dl_modulation_order.setter
-    def lte_dl_modulation_order(self, order):
-        """ Sets the DL modulation order of the LTE cell
-
-        Args:
-            order: the DL modulation order of the LTE cell
-
-        Returns:
-            None
-        """
-        if isinstance(order, ModulationType):
-            order = order.value
-        cmd = "DLRMC_MOD {},{}".format(order, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def lte_ul_modulation_order(self):
-        """ Gets the UL modulation order of the LTE cell
-
-        Args:
-            None
-
-        Returns:
-            The UL modulation order
-        """
-        cmd = "ULRMC_MOD? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @lte_ul_modulation_order.setter
-    def lte_ul_modulation_order(self, order):
-        """ Sets the UL modulation order of the LTE cell
-
-        Args:
-            order: the UL modulation order of the LTE cell
-
-        Returns:
-            None
-        """
-        if isinstance(order, ModulationType):
-            order = order.value
-        cmd = "ULRMC_MOD {},{}".format(order, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def nrb_dl(self):
-        """ Gets the Downlink N Resource Block of the cell
-
-        Args:
-            None
-
-        Returns:
-            Downlink NRB
-        """
-        cmd = "DLNRB? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @nrb_dl.setter
-    def nrb_dl(self, blocks):
-        """ Sets the Downlink N Resource Block of the cell
-
-        Args:
-            blocks: Downlink N Resource Block of the cell
-
-        Returns:
-            None
-        """
-        cmd = "DLNRB {},{}".format(blocks, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def nrb_ul(self):
-        """ Gets the uplink N Resource Block of the cell
-
-        Args:
-            None
-
-        Returns:
-            uplink NRB
-        """
-        cmd = "ULNRB? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @nrb_ul.setter
-    def nrb_ul(self, blocks):
-        """ Sets the uplink N Resource Block of the cell
-
-        Args:
-            blocks: uplink N Resource Block of the cell
-
-        Returns:
-            None
-        """
-        cmd = "ULNRB {},{}".format(blocks, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def max_nrb_ul(self):
-        ul_bandwidth = self.ul_bandwidth
-        if ul_bandwidth == 'SAMEASDL':
-            ul_bandwidth = self.dl_bandwidth
-        max_nrb = MAX_NRB_FOR_BANDWIDTH.get(ul_bandwidth, None)
-        if not max_nrb:
-            raise ValueError('Could not get maximum RB allocation '
-                             'for bandwidth: {}'.format(ul_bandwidth))
-        return max_nrb
-
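# Usage sketch: max_nrb_ul resolves 'SAMEASDL' to the DL bandwidth before
# looking it up in MAX_NRB_FOR_BANDWIDTH, so it can be used directly to
# request a full uplink allocation. Assumes `bts` is a
# _BaseTransceiverStation instance.
def _example_full_ul_allocation(bts):
    bts.nrb_ul = bts.max_nrb_ul  # "ULNRB <max>,<bts>"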
-    @property
-    def mimo_support(self):
-        """ Gets the maximum supported MIMO mode for the LTE bases tation.
-
-        Returns:
-            the MIMO mode as a string
-        """
-        cmd = "LTEMIMO? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @mimo_support.setter
-    def mimo_support(self, mode):
-        """ Sets the maximum supported MIMO mode for the LTE base station.
-
-        Args:
-            mode: a string or an object of the LteMimoMode class.
-        """
-
-        if isinstance(mode, LteMimoMode):
-            mode = mode.value
-
-        cmd = "LTEMIMO {},{}".format(self._bts_number, mode)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def neighbor_cell_mode(self):
-        """ Gets the neighbor cell mode
-
-        Args:
-            None
-
-        Returns:
-            current neighbor cell mode
-        """
-        cmd = "NCLIST? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @neighbor_cell_mode.setter
-    def neighbor_cell_mode(self, mode):
-        """ Sets the neighbor cell mode
-
-        Args:
-            mode: neighbor cell mode, DEFAULT / USERDATA
-
-        Returns:
-            None
-        """
-        cmd = "NCLIST {},{}".format(mode, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    def get_neighbor_cell_type(self, system, index):
-        """ Gets the neighbor cell type
-
-        Args:
-            system: simulation model of neighbor cell
-                    LTE, WCDMA, TDSCDMA, GSM, CDMA1X,EVDO
-            index: Index of neighbor cell
-
-        Returns:
-            neighbor cell type
-        """
-        cmd = "NCTYPE? {},{},{}".format(system, index, self._bts_number)
-        return self._anritsu.send_query(cmd)
-
-    def set_neighbor_cell_type(self, system, index, cell_type):
-        """ Sets the neighbor cell type
-
-        Args:
-            system: simulation model of neighbor cell
-                   LTE, WCDMA, TDSCDMA, GSM, CDMA1X,EVDO
-            index: Index of neighbor cell
-            cell_type: cell type
-                BTS1, BTS2, BTS3, BTS4,CELLNAME, DISABLE
-
-        Returns:
-            None
-        """
-        cmd = "NCTYPE {},{},{},{}".format(system, index, cell_type,
-                                          self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    def get_neighbor_cell_name(self, system, index):
-        """ Gets the neighbor cell name
-
-        Args:
-            system: simulation model of neighbor cell
-                    LTE, WCDMA, TDSCDMA, GSM, CDMA1X,EVDO
-            index: Index of neighbor cell
-
-        Returns:
-            neighbor cell name
-        """
-        cmd = "NCCELLNAME? {},{},{}".format(system, index, self._bts_number)
-        return self._anritsu.send_query(cmd)
-
-    def set_neighbor_cell_name(self, system, index, name):
-        """ Sets the neighbor cell name
-
-        Args:
-            system: simulation model of neighbor cell
-                   LTE, WCDMA, TDSCDMA, GSM, CDMA1X,EVDO
-            index: Index of neighbor cell
-            name: cell name
-
-        Returns:
-            None
-        """
-        cmd = "NCCELLNAME {},{},{},{}".format(system, index, name,
-                                              self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    def get_neighbor_cell_mcc(self, system, index):
-        """ Gets the neighbor cell mcc
-
-        Args:
-            system: simulation model of neighbor cell
-                    LTE, WCDMA, TDSCDMA, GSM, CDMA1X,EVDO
-            index: Index of neighbor cell
-
-        Returns:
-            neighbor cell mcc
-        """
-        cmd = "NCMCC? {},{},{}".format(system, index, self._bts_number)
-        return self._anritsu.send_query(cmd)
-
-    def get_neighbor_cell_mnc(self, system, index):
-        """ Gets the neighbor cell mnc
-
-        Args:
-            system: simulation model of neighbor cell
-                    LTE, WCDMA, TDSCDMA, GSM, CDMA1X,EVDO
-            index: Index of neighbor cell
-
-        Returns:
-            neighbor cell mnc
-        """
-        cmd = "NCMNC? {},{},{}".format(system, index, self._bts_number)
-        return self._anritsu.send_query(cmd)
-
-    def get_neighbor_cell_id(self, system, index):
-        """ Gets the neighbor cell id
-
-        Args:
-            system: simulation model of neighbor cell
-                    LTE, WCDMA, TDSCDMA, GSM, CDMA1X,EVDO
-            index: Index of neighbor cell
-
-        Returns:
-            neighbor cell id
-        """
-        cmd = "NCCELLID? {},{},{}".format(system, index, self._bts_number)
-        return self._anritsu.send_query(cmd)
-
-    def get_neighbor_cell_tac(self, system, index):
-        """ Gets the neighbor cell tracking area code
-
-        Args:
-            system: simulation model of neighbor cell
-                    LTE, WCDMA, TDSCDMA, GSM, CDMA1X,EVDO
-            index: Index of neighbor cell
-
-        Returns:
-            neighbor cell tracking area code
-        """
-        cmd = "NCTAC? {},{},{}".format(system, index, self._bts_number)
-        return self._anritsu.send_query(cmd)
-
-    def get_neighbor_cell_dl_channel(self, system, index):
-        """ Gets the neighbor cell downlink channel
-
-        Args:
-            system: simulation model of neighbor cell
-                    LTE, WCDMA, TDSCDMA, GSM, CDMA1X,EVDO
-            index: Index of neighbor cell
-
-        Returns:
-            neighbor cell tracking downlink channel
-        """
-        cmd = "NCDLCHAN? {},{},{}".format(system, index, self._bts_number)
-        return self._anritsu.send_query(cmd)
-
-    def get_neighbor_cell_dl_bandwidth(self, system, index):
-        """ Gets the neighbor cell downlink bandwidth
-
-        Args:
-            system: simulation model of neighbor cell
-                    LTE, WCDMA, TDSCDMA, GSM, CDMA1X,EVDO
-            index: Index of neighbor cell
-
-        Returns:
-            neighbor cell tracking downlink bandwidth
-        """
-        cmd = "NCDLBANDWIDTH {},{},{}".format(system, index, self._bts_number)
-        return self._anritsu.send_query(cmd)
-
-    def get_neighbor_cell_pcid(self, system, index):
-        """ Gets the neighbor cell physical cell id
-
-        Args:
-            system: simulation model of neighbor cell
-                    LTE, WCDMA, TDSCDMA, GSM, CDMA1X,EVDO
-            index: Index of neighbor cell
-
-        Returns:
-            neighbor cell physical cell id
-        """
-        cmd = "NCPHYCELLID {},{},{}".format(system, index, self._bts_number)
-        return self._anritsu.send_query(cmd)
-
-    def get_neighbor_cell_lac(self, system, index):
-        """ Gets the neighbor cell location area code
-
-        Args:
-            system: simulation model of neighbor cell
-                    LTE, WCDMA, TDSCDMA, GSM, CDMA1X,EVDO
-            index: Index of neighbor cell
-
-        Returns:
-            neighbor cell location area code
-        """
-        cmd = "NCLAC {},{},{}".format(system, index, self._bts_number)
-        return self._anritsu.send_query(cmd)
-
-    def get_neighbor_cell_rac(self, system, index):
-        """ Gets the neighbor cell routing area code
-
-        Args:
-            system: simulation model of neighbor cell
-                    LTE, WCDMA, TDSCDMA, GSM, CDMA1X,EVDO
-            index: Index of neighbor cell
-
-        Returns:
-            neighbor cell routing area code
-        """
-        cmd = "NCRAC {},{},{}".format(system, index, self._bts_number)
-        return self._anritsu.send_query(cmd)
-
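# Usage sketch: the neighbor-cell helpers above all take the simulation model
# name and the neighbor index. Assumes `bts` is a _BaseTransceiverStation
# instance; the index and cell name are illustrative, and "CELLNAME" is one of
# the cell types listed in the docstrings above.
def _example_neighbor_cell(bts):
    bts.neighbor_cell_mode = "USERDATA"
    bts.set_neighbor_cell_type("LTE", 1, "CELLNAME")
    bts.set_neighbor_cell_name("LTE", 1, "neighbor_cell_1")
    return bts.get_neighbor_cell_name("LTE", 1)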
-    @property
-    def primary_scrambling_code(self):
-        """ Gets the primary scrambling code for WCDMA cell
-
-        Args:
-            None
-
-        Returns:
-            primary scrambling code
-        """
-        cmd = "PRISCRCODE? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @primary_scrambling_code.setter
-    def primary_scrambling_code(self, psc):
-        """ Sets the primary scrambling code for WCDMA cell
-
-        Args:
-            psc: primary scrambling code
-
-        Returns:
-            None
-        """
-        cmd = "PRISCRCODE {},{}".format(psc, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def tac(self):
-        """ Gets the Tracking Area Code of the LTE cell
-
-        Args:
-            None
-
-        Returns:
-            Tracking Area Code of the LTE cell
-        """
-        cmd = "TAC? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @tac.setter
-    def tac(self, tac):
-        """ Sets the Tracking Area Code of the LTE cell
-
-        Args:
-            tac: Tracking Area Code of the LTE cell
-
-        Returns:
-            None
-        """
-        cmd = "TAC {},{}".format(tac, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def cell(self):
-        """ Gets the current cell for BTS
-
-        Args:
-            None
-
-        Returns:
-            current cell for BTS
-        """
-        cmd = "CELLSEL? {}".format(self._bts_number)
-        return self._anritsu.send_query(cmd)
-
-    @cell.setter
-    def cell(self, cell_name):
-        """ sets the  cell for BTS
-        Args:
-            cell_name: cell name
-
-        Returns:
-            None
-        """
-        cmd = "CELLSEL {},{}".format(self._bts_number, cell_name)
-        return self._anritsu.send_command(cmd)
-
-    @property
-    def gsm_cbch(self):
-        """ Gets the GSM CBCH enable/disable status
-
-        Args:
-            None
-
-        Returns:
-            one of CBCHSetup values
-        """
-        cmd = "CBCHPARAMSETUP? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @gsm_cbch.setter
-    def gsm_cbch(self, enable):
-        """ Sets the GSM CBCH enable/disable status
-
-        Args:
-            enable: GSM CBCH enable/disable status
-
-        Returns:
-            None
-        """
-        cmd = "CBCHPARAMSETUP {},{}".format(enable.value, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def gsm_gprs_mode(self):
-        """ Gets the GSM connection mode
-
-        Args:
-            None
-
-        Returns:
-            A string indicating if connection is EGPRS, GPRS or non-GPRS
-        """
-        cmd = "GPRS? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @gsm_gprs_mode.setter
-    def gsm_gprs_mode(self, mode):
-        """ Sets the GPRS connection mode
-
-        Args:
-            mode: GPRS connection mode
-
-        Returns:
-            None
-        """
-
-        if not isinstance(mode, BtsGprsMode):
-            raise ValueError(' The parameter should be of type "BtsGprsMode"')
-        cmd = "GPRS {},{}".format(mode.value, self._bts_number)
-
-        self._anritsu.send_command(cmd)
-
-    @property
-    def gsm_slots(self):
-        """ Gets the GSM slot assignment
-
-        Args:
-            None
-
-        Returns:
-            A tuple indicating DL and UL slots.
-        """
-
-        cmd = "MLTSLTCFG? " + self._bts_number
-
-        response = self._anritsu.send_query(cmd)
-        split_response = response.split(',')
-
-        if not len(split_response) == 2:
-            raise ValueError(response)
-
-        return split_response[0], split_response[1]
-
-    @gsm_slots.setter
-    def gsm_slots(self, slots):
-        """ Sets the number of downlink / uplink slots for GSM
-
-        Args:
-            slots: a tuple containing two ints indicating (DL,UL)
-
-        Returns:
-            None
-        """
-
-        try:
-            dl, ul = slots
-            dl = int(dl)
-            ul = int(ul)
-        except (TypeError, ValueError):
-            raise ValueError(
-                'The parameter slot has to be a tuple containing two ints '
-                'indicating (dl,ul) slots.')
-
-        # Validate
-        if dl < 1 or ul < 1 or dl + ul > 5:
-            raise ValueError(
-                'DL and UL slots have to be >= 1 and the sum <= 5.')
-
-        cmd = "MLTSLTCFG {},{},{}".format(dl, ul, self._bts_number)
-
-        self._anritsu.send_command(cmd)
-
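# Usage sketch: gsm_slots takes a (DL, UL) tuple and enforces DL >= 1, UL >= 1
# and DL + UL <= 5 before sending the multislot configuration. Assumes `bts`
# is a _BaseTransceiverStation instance on a GSM cell and that BtsGprsMode has
# an EGPRS member (assumed name).
def _example_gsm_multislot(bts):
    bts.gsm_gprs_mode = BtsGprsMode.EGPRS
    bts.gsm_slots = (3, 2)  # "MLTSLTCFG 3,2,<bts>"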
-
-class _VirtualPhone(object):
-    '''Class to interact with the virtual phone supported by the MD8475.'''
-    def __init__(self, anritsu):
-        self._anritsu = anritsu
-        self.log = anritsu.log
-
-    @property
-    def id(self):
-        """ Gets the virtual phone ID
-
-        Args:
-            None
-
-        Returns:
-            virtual phone ID
-        """
-        cmd = "VPID? "
-        return self._anritsu.send_query(cmd)
-
-    @id.setter
-    def id(self, phonenumber):
-        """ Sets the virtual phone ID
-
-        Args:
-            phonenumber: virtual phone ID
-
-        Returns:
-            None
-        """
-        cmd = "VPID {}".format(phonenumber)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def id_c2k(self):
-        """ Gets the virtual phone ID for CDMA 1x
-
-        Args:
-            None
-
-        Returns:
-            virtual phone ID
-        """
-        cmd = "VPIDC2K? "
-        return self._anritsu.send_query(cmd)
-
-    @id_c2k.setter
-    def id_c2k(self, phonenumber):
-        """ Sets the virtual phone ID for CDMA 1x
-
-        Args:
-            phonenumber: virtual phone ID
-
-        Returns:
-            None
-        """
-        cmd = "VPIDC2K {}".format(phonenumber)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def auto_answer(self):
-        """ Gets the auto answer status of virtual phone
-
-        Args:
-            None
-
-        Returns:
-            auto answer status, ON/OFF
-        """
-        cmd = "VPAUTOANSWER? "
-        return self._anritsu.send_query(cmd)
-
-    @auto_answer.setter
-    def auto_answer(self, option):
-        """ Sets the auto answer feature
-
-        Args:
-            option: tuple with two items for turning on Auto Answer
-                    (OFF or (ON, timetowait))
-
-        Returns:
-            None
-        """
-        enable = "OFF"
-        time = 5
-
-        try:
-            enable, time = option
-        except ValueError:
-            if enable != "OFF":
-                raise ValueError("Pass a tuple with two items for"
-                                 " Turning on Auto Answer")
-        cmd = "VPAUTOANSWER {},{}".format(enable.value, time)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def calling_mode(self):
-        """ Gets the calling mode of virtual phone
-</