diff --git a/.editorconfig b/.editorconfig
new file mode 100644
index 0000000..027bdb1
--- /dev/null
+++ b/.editorconfig
@@ -0,0 +1,14 @@
+# EditorConfig is awesome: https://EditorConfig.org
+
+root = true
+
+[*]
+end_of_line = lf
+insert_final_newline = true
+trim_trailing_whitespace = true
+
+[*.py]
+charset = utf-8
+indent_style = space
+indent_size = 4
+max_line_length = 88
diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs
new file mode 100644
index 0000000..befa060
--- /dev/null
+++ b/.git-blame-ignore-revs
@@ -0,0 +1,5 @@
+# Run code through yapf
+19a821d5f1ff9079f9a40d27553182a433a27834
+
+# Run code through black
+0d9e3581d57f376865f49ae62fe9171789beca56
diff --git a/.gitignore b/.gitignore
index 029341d..9321436 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,90 +1,48 @@
+#
+# OS-specific
+#
+
 .DS_Store
-# Byte-compiled / optimized / DLL files
-__pycache__/
+
+#
+# Language specific
+#
+
+# Python
 *.py[cod]
-*$py.class
-
-# C extensions
-*.so
-
-# Distribution / packaging
-.Python
-env/
-build/
-develop-eggs/
-dist/
-downloads/
-eggs/
-.eggs/
-lib/
-lib64/
-parts/
-sdist/
-var/
 *.egg-info/
-.installed.cfg
-*.egg
-
-# PyInstaller
-#  Usually these files are written by a python script from a template
-#  before PyInstaller builds the exe, so as to inject date/other infos into it.
-*.manifest
-*.spec
-
-# Installer logs
-pip-log.txt
-pip-delete-this-directory.txt
-
-# Unit test / coverage reports
-htmlcov/
-.tox/
-.coverage
-.coverage.*
-.cache
-nosetests.xml
-coverage.xml
-*,cover
-.hypothesis/
-
-# Translations
-*.mo
-*.pot
-
-# Django stuff:
-*.log
-local_settings.py
-
-# Sphinx documentation
-docs/_build/
-
-# PyBuilder
-target/
-
-#Ipython Notebook
-.ipynb_checkpoints
-
-# pyenv
-.python-version
-
-# PyCharm
-.idea/
-
-# IntelliJ
-*.iml
-
-# VSCode
-/.vscode
-
-# Python virtual environment
+/build/
 /.venv
+/.mypy_cache
 
-# antlion configuration files
+#
+# Editors
+#
+
+/.idea/
+/.vscode/
+
+#
+# antlion
+#
+
+# Configuration
 /*.json
 /*.yaml
 /config/
 
-# antlion runtime files
+# Generated during run-time
 /logs
 
 # Local development scripts
 /*.sh
+!/format.sh
+
+#
+# third_party
+#
+
+/third_party/*
+!/third_party/github.com/
+!/third_party/github.com/jd/tenacity
+/third_party/github.com/jd/tenacity/src
diff --git a/BUILD.gn b/BUILD.gn
new file mode 100644
index 0000000..ce2c77a
--- /dev/null
+++ b/BUILD.gn
@@ -0,0 +1,229 @@
+# Copyright 2023 The Fuchsia Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Declare Fuchsia build targets for using antlion from the Fuchsia tree.
+# Requires additional configuration of jiri fetch attributes from your Fuchsia
+# checkout:
+#   `jiri init -fetch-optional=antlion`
+
+import("//build/python/python_library.gni")
+
+# Tests for full build validation
+group("e2e_tests") {
+  testonly = true
+  public_deps = [ "tests:e2e_tests" ]
+}
+
+# Subset of tests to validate builds in under 15 minutes.
+group("e2e_tests_quick") {
+  testonly = true
+  public_deps = [ "tests:e2e_tests_quick" ]
+}
+
+# Tests for at-desk custom validation
+group("e2e_tests_manual") {
+  testonly = true
+  public_deps = [ "tests:e2e_tests_manual" ]
+}
+
+# deprecated: prefer e2e_tests_quick
+group("smoke_tests") {
+  testonly = true
+  public_deps = [ ":e2e_tests_quick" ]
+}
+
+# Unit tests only
+group("tests") {
+  testonly = true
+  public_deps = [ "runner:tests" ]
+}
+
+python_library("antlion") {
+  source_root = "//third_party/antlion/packages/antlion"
+  testonly = true
+  sources = [
+    "__init__.py",
+    "base_test.py",
+    "bin/__init__.py",
+    "bin/act.py",
+    "capabilities/__init__.py",
+    "capabilities/ssh.py",
+    "config_parser.py",
+    "context.py",
+    "controllers/__init__.py",
+    "controllers/access_point.py",
+    "controllers/adb.py",
+    "controllers/adb_lib/__init__.py",
+    "controllers/adb_lib/error.py",
+    "controllers/android_device.py",
+    "controllers/android_lib/__init__.py",
+    "controllers/android_lib/errors.py",
+    "controllers/android_lib/events.py",
+    "controllers/android_lib/logcat.py",
+    "controllers/android_lib/services.py",
+    "controllers/android_lib/tel/__init__.py",
+    "controllers/android_lib/tel/tel_utils.py",
+    "controllers/ap_lib/__init__.py",
+    "controllers/ap_lib/ap_get_interface.py",
+    "controllers/ap_lib/ap_iwconfig.py",
+    "controllers/ap_lib/bridge_interface.py",
+    "controllers/ap_lib/dhcp_config.py",
+    "controllers/ap_lib/dhcp_server.py",
+    "controllers/ap_lib/extended_capabilities.py",
+    "controllers/ap_lib/hostapd.py",
+    "controllers/ap_lib/hostapd_ap_preset.py",
+    "controllers/ap_lib/hostapd_bss_settings.py",
+    "controllers/ap_lib/hostapd_config.py",
+    "controllers/ap_lib/hostapd_constants.py",
+    "controllers/ap_lib/hostapd_security.py",
+    "controllers/ap_lib/hostapd_utils.py",
+    "controllers/ap_lib/radio_measurement.py",
+    "controllers/ap_lib/radvd.py",
+    "controllers/ap_lib/radvd_config.py",
+    "controllers/ap_lib/radvd_constants.py",
+    "controllers/ap_lib/regulatory_channels.py",
+    "controllers/ap_lib/third_party_ap_profiles/__init__.py",
+    "controllers/ap_lib/third_party_ap_profiles/actiontec.py",
+    "controllers/ap_lib/third_party_ap_profiles/asus.py",
+    "controllers/ap_lib/third_party_ap_profiles/belkin.py",
+    "controllers/ap_lib/third_party_ap_profiles/linksys.py",
+    "controllers/ap_lib/third_party_ap_profiles/netgear.py",
+    "controllers/ap_lib/third_party_ap_profiles/securifi.py",
+    "controllers/ap_lib/third_party_ap_profiles/tplink.py",
+    "controllers/ap_lib/wireless_network_management.py",
+    "controllers/attenuator.py",
+    "controllers/attenuator_lib/__init__.py",
+    "controllers/attenuator_lib/_tnhelper.py",
+    "controllers/attenuator_lib/aeroflex/__init__.py",
+    "controllers/attenuator_lib/aeroflex/telnet.py",
+    "controllers/attenuator_lib/minicircuits/__init__.py",
+    "controllers/attenuator_lib/minicircuits/http.py",
+    "controllers/attenuator_lib/minicircuits/telnet.py",
+    "controllers/fastboot.py",
+    "controllers/fuchsia_device.py",
+    "controllers/fuchsia_lib/__init__.py",
+    "controllers/fuchsia_lib/base_lib.py",
+    "controllers/fuchsia_lib/device_lib.py",
+    "controllers/fuchsia_lib/ffx.py",
+    "controllers/fuchsia_lib/hardware_power_statecontrol_lib.py",
+    "controllers/fuchsia_lib/lib_controllers/__init__.py",
+    "controllers/fuchsia_lib/lib_controllers/wlan_controller.py",
+    "controllers/fuchsia_lib/lib_controllers/wlan_policy_controller.py",
+    "controllers/fuchsia_lib/location/__init__.py",
+    "controllers/fuchsia_lib/location/regulatory_region_lib.py",
+    "controllers/fuchsia_lib/logging_lib.py",
+    "controllers/fuchsia_lib/netstack/__init__.py",
+    "controllers/fuchsia_lib/netstack/netstack_lib.py",
+    "controllers/fuchsia_lib/package_server.py",
+    "controllers/fuchsia_lib/sl4f.py",
+    "controllers/fuchsia_lib/ssh.py",
+    "controllers/fuchsia_lib/utils_lib.py",
+    "controllers/fuchsia_lib/wlan_ap_policy_lib.py",
+    "controllers/fuchsia_lib/wlan_deprecated_configuration_lib.py",
+    "controllers/fuchsia_lib/wlan_lib.py",
+    "controllers/fuchsia_lib/wlan_policy_lib.py",
+    "controllers/iperf_client.py",
+    "controllers/iperf_server.py",
+    "controllers/openwrt_ap.py",
+    "controllers/openwrt_lib/__init__.py",
+    "controllers/openwrt_lib/network_const.py",
+    "controllers/openwrt_lib/network_settings.py",
+    "controllers/openwrt_lib/openwrt_constants.py",
+    "controllers/openwrt_lib/wireless_config.py",
+    "controllers/openwrt_lib/wireless_settings_applier.py",
+    "controllers/packet_capture.py",
+    "controllers/packet_sender.py",
+    "controllers/pdu.py",
+    "controllers/pdu_lib/__init__.py",
+    "controllers/pdu_lib/digital_loggers/__init__.py",
+    "controllers/pdu_lib/digital_loggers/webpowerswitch.py",
+    "controllers/pdu_lib/synaccess/__init__.py",
+    "controllers/pdu_lib/synaccess/np02b.py",
+    "controllers/sl4a_lib/__init__.py",
+    "controllers/sl4a_lib/error_reporter.py",
+    "controllers/sl4a_lib/event_dispatcher.py",
+    "controllers/sl4a_lib/rpc_client.py",
+    "controllers/sl4a_lib/rpc_connection.py",
+    "controllers/sl4a_lib/sl4a_manager.py",
+    "controllers/sl4a_lib/sl4a_ports.py",
+    "controllers/sl4a_lib/sl4a_session.py",
+    "controllers/sl4a_lib/sl4a_types.py",
+    "controllers/sniffer.py",
+    "controllers/sniffer_lib/__init__.py",
+    "controllers/sniffer_lib/local/__init__.py",
+    "controllers/sniffer_lib/local/local_base.py",
+    "controllers/sniffer_lib/local/tcpdump.py",
+    "controllers/sniffer_lib/local/tshark.py",
+    "controllers/utils_lib/__init__.py",
+    "controllers/utils_lib/commands/__init__.py",
+    "controllers/utils_lib/commands/ip.py",
+    "controllers/utils_lib/commands/route.py",
+    "controllers/utils_lib/commands/shell.py",
+    "controllers/utils_lib/host_utils.py",
+    "controllers/utils_lib/ssh/__init__.py",
+    "controllers/utils_lib/ssh/connection.py",
+    "controllers/utils_lib/ssh/formatter.py",
+    "controllers/utils_lib/ssh/settings.py",
+    "decorators.py",
+    "dict_object.py",
+    "error.py",
+    "event/__init__.py",
+    "event/decorators.py",
+    "event/event.py",
+    "event/event_bus.py",
+    "event/event_subscription.py",
+    "event/subscription_bundle.py",
+    "event/subscription_handle.py",
+    "keys.py",
+    "libs/__init__.py",
+    "libs/logging/__init__.py",
+    "libs/logging/log_stream.py",
+    "libs/ota/__init__.py",
+    "libs/ota/ota_runners/__init__.py",
+    "libs/ota/ota_runners/ota_runner.py",
+    "libs/ota/ota_runners/ota_runner_factory.py",
+    "libs/ota/ota_tools/__init__.py",
+    "libs/ota/ota_tools/adb_sideload_ota_tool.py",
+    "libs/ota/ota_tools/ota_tool.py",
+    "libs/ota/ota_tools/ota_tool_factory.py",
+    "libs/ota/ota_tools/update_device_ota_tool.py",
+    "libs/ota/ota_updater.py",
+    "libs/proc/__init__.py",
+    "libs/proc/job.py",
+    "libs/proc/process.py",
+    "libs/yaml_writer.py",
+    "logger.py",
+    "net.py",
+    "records.py",
+    "runner.py",
+    "signals.py",
+    "test_decorators.py",
+    "test_runner.py",
+    "test_utils/__init__.py",
+    "test_utils/abstract_devices/__init__.py",
+    "test_utils/abstract_devices/wlan_device.py",
+    "test_utils/abstract_devices/wmm_transceiver.py",
+    "test_utils/dhcp/__init__.py",
+    "test_utils/dhcp/base_test.py",
+    "test_utils/fuchsia/__init__.py",
+    "test_utils/fuchsia/utils.py",
+    "test_utils/fuchsia/wmm_test_cases.py",
+    "test_utils/net/__init__.py",
+    "test_utils/net/connectivity_const.py",
+    "test_utils/net/net_test_utils.py",
+    "test_utils/wifi/__init__.py",
+    "test_utils/wifi/base_test.py",
+    "test_utils/wifi/wifi_constants.py",
+    "test_utils/wifi/wifi_test_utils.py",
+    "tracelogger.py",
+    "utils.py",
+    "validation.py",
+  ]
+  library_deps = [
+    "//src/testing/end_to_end/honeydew",
+    "//third_party/mobly",
+    "//third_party/pyyaml:yaml",
+    "third_party/github.com/jd/tenacity",
+  ]
+}
diff --git a/CHANGELOG.md b/CHANGELOG.md
index a9c7f67..0c36022 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -10,20 +10,79 @@
 
 ## [Unreleased]
 
-### Added
-
-### Changed
+[unreleased]: https://fuchsia.googlesource.com/antlion/+/refs/tags/v0.3.0..refs/heads/main
 
 ### Removed
 
-### Fixed
+- [BREAKING CHANGE] Support for Python 3.8, 3.9, and 3.10. The minimum supported
+version of Python is now 3.11. If running antlion as part of the Fuchsia tree,
+nothing is required; Python 3.11 is vendored with Fuchsia and will be found by
+GN. If running antlion out of tree, ensure your Python version is at least 3.11.
+- `WlanRvrTest` user params `debug_pre_traffic_cmd` and `debug_post_traffic_cmd`
 
-[unreleased]: https://fuchsia.googlesource.com/antlion/+/refs/tags/v0.2.0..refs/heads/main
+## [0.3.0] - 2023-05-17
 
-## [0.2.0] - 2022-01-03
+[0.3.0]: https://fuchsia.googlesource.com/antlion/+/refs/tags/v0.2.0..refs/tags/v0.3.0
+
+### Deprecated
+
+- **Support for ACTS JSON configs; instead, use Mobly YAML configs.** To
+ease this transition, upon running `act.py`, a compatible YAML config will be
+generated for you and placed next to your JSON config.
+- **The `act.py` binary; instead, invoke tests directly.** Upon running
+`act.py`, a deprecation warning will provide instructions for how to invoke
+antlion tests without act.py and with the newly generated YAML config.
 
 ### Added
 
+- Presubmit testing in [CV] (aka CQ). All tests specified with the `qemu_env`
+environment will run before every antlion CL is submitted.
+- Postsubmit testing in [CI]. See [Milo] for an exhaustive list of builders.
+- [EditorConfig] file for consistent coding styles.
+Installing an EditorConfig plugin for your editor is highly recommended.
+
+[CV]: https://chromium.googlesource.com/infra/luci/luci-go/+/refs/heads/main/cv/README.md
+[CI]: https://chromium.googlesource.com/chromium/src/+/master/docs/tour_of_luci_ui.md
+[Milo]: https://luci-milo.appspot.com/ui/search?q=antlion
+[EditorConfig]: https://editorconfig.org
+
+### Changed
+
+- Default test execution from ACTS to Mobly. `antlion_host_test()` now invokes
+the test file directly using the Mobly test runner, rather than using `act.py`.
+  - All tests have been refactored to allow direct running with the Mobly test
+  runner.
+  - `act.py` now converts ACTS JSON config to compatible Mobly YAML config. The
+  resulting config is passed directly to Mobly's config parser. See notes for
+  this release's deprecations above.
+- Generate YAML config instead of JSON config from antlion-runner.
+- `FuchsiaDevice.authorized_file_loc` config field is now optional. This field
+is only used during `FlashTest`; it is not used when the device is already
+provisioned (e.g. when tests are dispatched in Fuchsia infrastructure).
+
+### Removed
+
+- Unused controllers and tests (full list)
+
+### Fixed
+
+- Failure to stop session_manager using ffx in `WlanRebootTest` ([@patricklu],
+[bug](http://b/267330535))
+- Failure to parse 'test_name' in DHCP configuration file in `Dhcpv4InteropTest`
+(invalid option) introduced by previous refactor ([@patricklu],
+[bug](http://b/232574848))
+- Logging for `Dhcpv4InteropTest` changed to utilize a temp file instead of
+/var/log/messages to fix test error with duplicate PID log messages
+([@patricklu], [bug](http://b/232574848))
+
+## [0.2.0] - 2023-01-03
+
+[0.2.0]: https://fuchsia.googlesource.com/antlion/+/refs/tags/v0.1.0..refs/tags/v0.2.0
+
+### Added
+
+- Added snapshots before reboot and during test teardown in `WlanRebootTest`
+([@patricklu], [bug](http://b/273923552))
 - Download radvd logs from AP for debugging IPv6 address allocation
 - Optional `wlan_features` config field to `FuchsiaDevice` for declaring which
 WLAN features the device supports, such as BSS Transition Management
@@ -32,12 +91,12 @@
 
 - All path config options in `FuchsiaDevice` expand the home directory (`~`) and
 environmental variables
-	- Used by `ssh_priv_key`, `authorized_file_loc`, and `ffx_binary_path` for
-	sensible defaults using `$FUCHSIA_DIR`
+  - Used by `ssh_priv_key`, `authorized_file_loc`, and `ffx_binary_path` for
+  sensible defaults using `$FUCHSIA_DIR`
 - Running tests works out of the box without specifying `--testpaths`
-	- Moved `tests` and `unit_tests` to the `antlion` package, enabling
-	straight-forward packaging of tests.
-	- Merged `antlion` and `antlion_contrib` packages
+  - Moved `tests` and `unit_tests` to the `antlion` package, enabling
+  straight-forward packaging of tests.
+  - Merged `antlion` and `antlion_contrib` packages
 - Converted several required dependencies to optional dependencies:
   - `bokeh` is only needed for producing HTML graphing. If this feature is
   desired, install antlion with the bokeh option: `pip install ".[bokeh]"`
@@ -57,19 +116,19 @@
 - Failure to acquire IPv6 address in `WlanRebootTest` ([bug](http://b/256009189))
 - Typo in `ChannelSweepTest` preventing use of iPerf ([@patricklu])
 - "Country code never updated" error affecting all Fuchsia ToT builds
-([@karlward], [bug](https://fxbug.dev/116500))
+([@karlward], [bug](https://fxbug.dev/42067674))
 - Parsing new stderr format from `ffx component destroy` ([@karlward],
-[bug](https://fxbug.dev/116544))
+[bug](https://fxbug.dev/42067722))
 - "Socket operation on non-socket" error during initialization of ffx on MacOS
-([@karlward], [bug](https://fxbug.dev/116626))
+([@karlward], [bug](https://fxbug.dev/42067812))
 - Python 3.8 support for IPv6 scope IDs ([bug](http://b/261746355))
 
-[0.2.0]: https://fuchsia.googlesource.com/antlion/+/refs/tags/v0.1.0..refs/tags/v0.2.0
-
 ## [0.1.0] - 2022-11-28
 
 Forked from ACTS with the following changes
 
+[0.1.0]: https://fuchsia.googlesource.com/antlion/+/refs/tags/v0.1.0
+
 ### Added
 
 - A modern approach to installation using `pyproject.toml` via `pip install .`
@@ -80,6 +139,8 @@
 - Package and import names from ACTS to antlion
 - Copyright notice from AOSP to Fuchsia Authors
 
+[src-layout]: https://setuptools.pypa.io/en/latest/userguide/package_discovery.html#src-layout
+
 ### Deprecated
 
 - Use of the `setup.py` script. This is only used to keep infrastructure
@@ -98,9 +159,6 @@
 - KeyError for 'mac_addr' in WlanDeprecatedConfigurationTest ([@sakuma],
 [bug](http://b/237709921))
 
-[0.1.0]: https://fuchsia.googlesource.com/antlion/+/refs/tags/v0.1.0
-[src-layout]: https://setuptools.pypa.io/en/latest/userguide/package_discovery.html#src-layout
-
 [@sakuma]: https://fuchsia-review.git.corp.google.com/q/owner:sakuma%2540google.com
 [@patricklu]: https://fuchsia-review.git.corp.google.com/q/owner:patricklu%2540google.com
 [@karlward]: https://fuchsia-review.git.corp.google.com/q/owner:karlward%2540google.com
diff --git a/MANIFEST.in b/MANIFEST.in
index a8ad1bb..a6caf7f 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -1,4 +1,4 @@
 include setup.py README.md
-recursive-include src/antlion *
+recursive-include packages/antlion *
 global-exclude .DS_Store
 global-exclude *.pyc
diff --git a/README.md b/README.md
index be529cf..74c5a6d 100644
--- a/README.md
+++ b/README.md
@@ -7,13 +7,91 @@
 
 [TOC]
 
-[Docs]: http://go/fxca
+[Docs]: http://go/antlion
 [Report Bug]: http://go/conn-test-bug
 [Request Feature]: http://b/issues/new?component=1182297&template=1680893
 
-## Getting Started
+## Getting started with QEMU
 
-Requires Python 3.8+
+The quickest way to run antlion is with the Fuchsia QEMU emulator. This enables
+running antlion tests that do not require hardware-specific capabilities like
+WLAN, and is especially useful for verifying that antlion builds and runs
+without syntax errors. If you require WLAN capabilities, see
+[below](#running-with-a-local-physical-device).
+
+1. [Checkout Fuchsia](https://fuchsia.dev/fuchsia-src/get-started/get_fuchsia_source)
+
+2. Configure and build Fuchsia to run antlion tests virtually on QEMU
+
+   ```sh
+   fx set core.qemu-x64 \
+      --with //src/testing/sl4f \
+      --with //src/sys/bin/start_sl4f \
+      --args 'core_realm_shards += [ "//src/testing/sl4f:sl4f_core_shard" ]' \
+      --with-host //third_party/antlion:e2e_tests_quick
+   fx build
+   ```
+
+3. In a separate terminal, run the emulator with networking enabled
+
+   ```sh
+   ffx emu stop && ffx emu start -H --net tap && ffx log
+   ```
+
+4. In a separate terminal, run a package server
+
+   ```sh
+   fx serve
+   ```
+
+5. Run an antlion test
+
+   ```sh
+   fx test --e2e --output //third_party/antlion/tests/examples:sl4f_sanity_test
+   ```
+
+## Running with a local physical device
+
+A physical device is required for most antlion tests, which rely on physical I/O
+such as WLAN and Bluetooth. Antlion is designed to make testing physical devices
+as easy, reliable, and reproducible as possible. The device will be discovered
+using mDNS, so make sure your host machine has a network connection to the
+device.
+
+1. Configure and build Fuchsia for your target with the following extra
+   arguments:
+
+   ```sh
+   fx set core.my-super-cool-product \
+      --with //src/testing/sl4f \
+      --with //src/sys/bin/start_sl4f \
+      --args='core_realm_shards += [ "//src/testing/sl4f:sl4f_core_shard" ]' \
+      --with-host //third_party/antlion:e2e_tests
+   fx build
+   ```
+
+2. Flash your device with the new build
+
+3. In a separate terminal, run a package server
+
+   ```sh
+   fx serve
+   ```
+
+4. Run an antlion test
+
+   ```sh
+   fx test --e2e --output //third_party/antlion/tests/functional:ping_stress_test
+   ```
+
+> Local auxiliary devices are not yet supported by `antlion-runner`, which is
+> responsible for generating Mobly configs. In the meantime, Mobly configs that
+> include auxiliary devices must be crafted by hand, as sketched below and
+> described in the section that follows.
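+
+For illustration only, an auxiliary device such as an access point can be added
+as a second controller entry in a hand-written Mobly config. The `AccessPoint`
+fields below are a sketch; consult the corresponding controller module for the
+exact schema.
+
+```yaml
+TestBeds:
+- Name: my_testbed
+  Controllers:
+    FuchsiaDevice:
+    - ip: fuchsia-00e0-4c01-04df
+    AccessPoint:
+    - ssh_config:
+        host: 192.168.1.2
+        user: root
+MoblyParams:
+  LogPath: logs
+```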
+
+## Running without a Fuchsia checkout
+
+Requires Python 3.11+
 
 1. Clone the repo
 
@@ -25,52 +103,81 @@
 
    ```sh
    cd antlion
-   python3 -m venv .venv  # creates a "virtual environment" in the `.venv` directory
-   source .venv/bin/activate  # activates the virtual environment. Run `deactivate` to exit it later
-   pip install --editable ".[dev,test]"
+   python3 -m venv .venv      # Create a virtual environment in the `.venv` directory
+   source .venv/bin/activate  # Activate the virtual environment
+   pip install --editable ".[mdns]"
+   # Run `deactivate` later to exit the virtual environment
    ```
 
 3. Write the sample config and update the Fuchsia controller to match your
    development environment
 
    ```sh
-   mkdir -p config
-   cat <<EOF > config/simple.json
-   {
-      "testbed": [{
-         "name": "simple_testbed",
-         "FuchsiaDevice": [{
-            "ip": "fuchsia-00e0-4c01-04df"
-         }]
-      }],
-      "logpath": "logs"
-   }
+   cat <<EOF > simple-config.yaml
+   TestBeds:
+   - Name: antlion-runner
+     Controllers:
+       FuchsiaDevice:
+       - ip: fuchsia-00e0-4c01-04df
+   MoblyParams:
+     LogPath: logs
    EOF
    ```
 
+   Replace `fuchsia-00e0-4c01-04df` with your device's nodename, or
+   `fuchsia-emulator` if using an emulator. The nodename can be found by looking
+   for a log similar to the one below.
+
+   ```text
+   [0.524][klog][klog][I] netsvc: nodename='fuchsia-emulator'
+   ```
+
 4. Run the sanity test
 
    ```sh
-   antlion -c config/simple.json -tc Sl4fSanityTest
+   python tests/examples/Sl4fSanityTest.py -c simple-config.yaml
    ```
 
-See `antlion -h` for more full usage.
-
 ## Contributing
 
-Contributions are what make open source a great place to learn, inspire, and
-create. Any contributions you make are **greatly appreciated**.
+Contributions are what make open source projects a great place to learn,
+inspire, and create. Any contributions you make are **greatly appreciated**.
+If you have a suggestion that would make this better, please create a CL.
 
-If you have a suggestion that would make this better, please create a pull
-request.
+Before contributing, additional setup is necessary:
 
-1. Create a feature branch (`git checkout -b feature/amazing-feature`)
-2. Document your change in `CHANGELOG.md`
-3. Commit changes (`git commit -m 'Add some amazing feature'`)
-4. Upload CL (`git push origin HEAD:refs/for/main`)
+- Install developer Python packages for formatting and linting
+
+  ```sh
+  pip install --editable ".[dev]"
+  ```
+
+- Install an [EditorConfig](https://editorconfig.org/) plugin for consistent
+  whitespace
+
+- Complete the steps in '[Contribute source changes]' to gain authorization to
+  upload CLs to Fuchsia's Gerrit.
+
+To create a CL:
+
+1. Create a branch (`git checkout -b feature/amazing-feature`)
+2. Make changes
+3. Document the changes in `CHANGELOG.md`
+4. Auto-format changes (`./format.sh`)
+
+   > Note: antlion follows the [Black code style] (rather than the
+   > [Google Python Style Guide])
+
+5. Verify no typing errors (`mypy .`)
+6. Commit changes (`git add . && git commit -m 'Add some amazing feature'`)
+7. Upload CL (`git push origin HEAD:refs/for/main`)
 
 > A public bug tracker is not (yet) available.
 
+[Black code style]: https://black.readthedocs.io/en/stable/the_black_code_style/current_style.html
+[Google Python Style Guide]: https://google.github.io/styleguide/pyguide.html
+[Contribute source changes]: https://fuchsia.dev/fuchsia-src/development/source_code/contribute_changes#prerequisites
+
 ### Recommended git aliases
 
 There are a handful of git commands that will be commonly used throughout the
@@ -87,6 +194,13 @@
   uc = push origin HEAD:refs/for/main%l=Commit-Queue+1,l=Fuchsia-Auto-Submit+1,publish-comments,r=sbalana
 ```
 
+You may also want to add a section to your git config so that `git blame`
+ignores the project's large formatting changes:
+
+```gitconfig
+[blame]
+  ignoreRevsFile = .git-blame-ignore-revs
+```
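+
+The same ignore file can also be passed directly for a one-off blame without
+changing your git config:
+
+```sh
+git blame --ignore-revs-file=.git-blame-ignore-revs <file>
+```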
+
 ## License
 
 Distributed under the Apache 2.0 License. See `LICENSE` for more information.
diff --git a/antlion_host_test.gni b/antlion_host_test.gni
new file mode 100644
index 0000000..d9bdd89
--- /dev/null
+++ b/antlion_host_test.gni
@@ -0,0 +1,195 @@
+# Copyright 2024 The Fuchsia Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/host.gni")
+import("//build/python/python_binary.gni")
+import("//build/rust/rustc_binary.gni")
+import("//build/testing/host_test.gni")
+import("//build/testing/host_test_data.gni")
+
+# Declares a host-side antlion test.
+#
+# Examples
+#
+# ```
+# antlion_host_test("sl4f_sanity_test") {
+#   main_source = "Sl4fSanityTest.py"
+# }
+#
+# antlion_host_test("wlan_rvr_test_2g") {
+#   main_source = "WlanRvrTest.py"
+#   test_params = "rvr_settings.yaml"
+#   test_cases = [ "test_rvr_11n_2g_*" ]
+# }
+# ```
+#
+# Parameters
+#
+#  main_source
+#    The .py file defining the antlion test.
+#    Type: path
+#
+#  sources (optional)
+#    Other files that are used in the test.
+#    Type: list(path)
+#    Default: empty list
+#
+#  test_params (optional)
+#    Path to a YAML file with additional test parameters. This will be provided
+#    to the test in the antlion config under the "test_params" key.
+#    Type: string
+#
+#  test_cases (optional)
+#    List of test cases to run. Defaults to running all test cases.
+#    Type: list(string)
+#
+#  enable_honeydew (optional)
+#    Flag to enable use of the honeydew package.
+#    Type: bool
+#
+#  test_data_deps (optional)
+#    List of test data GN targets that are needed at runtime.
+#    Type: list(string)
+#    Default: empty list
+#
+#   deps
+#   environments
+#   visibility
+template("antlion_host_test") {
+  assert(defined(invoker.main_source), "main_source is required")
+
+  #
+  # Define antlion test python_binary().
+  #
+  _python_binary_name = "${target_name}.pyz"
+  _python_binary_target = "${target_name}_python_binary"
+  python_binary(_python_binary_target) {
+    forward_variables_from(invoker,
+                           [
+                             "main_source",
+                             "sources",
+                           ])
+    output_name = _python_binary_name
+    main_callable = "test_runner.main" # Mobly-specific entry point.
+    deps = [ "//third_party/antlion" ]
+    testonly = true
+    visibility = [ ":*" ]
+  }
+
+  _test_dir = "${root_out_dir}/test_data/" + get_label_info(target_name, "dir")
+
+  #
+  # Define antlion test host_test_data().
+  #
+  _host_test_data_target = "${target_name}_test_data"
+  host_test_data(_host_test_data_target) {
+    testonly = true
+    visibility = [ ":*" ]
+    sources = [ get_label_info(":${_python_binary_target}", "target_out_dir") +
+                "/${_python_binary_name}" ]
+    outputs = [ "${_test_dir}/${_python_binary_name}" ]
+    deps = [ ":${_python_binary_target}" ]
+    if (defined(invoker.deps)) {
+      deps += invoker.deps
+    }
+  }
+
+  #
+  # Define SSH binary host_test_data().
+  #
+  _host_test_data_ssh = "${target_name}_test_data_ssh"
+  host_test_data(_host_test_data_ssh) {
+    testonly = true
+    visibility = [ ":*" ]
+    sources = [ "//prebuilt/third_party/openssh-portable/${host_os}-${host_cpu}/bin/ssh" ]
+    outputs = [ "${_test_dir}/ssh" ]
+  }
+
+  #
+  # Define Mobly test params YAML host_test_data().
+  #
+  if (defined(invoker.test_params)) {
+    _host_test_data_test_params = "${target_name}_test_data_test_params"
+    host_test_data(_host_test_data_test_params) {
+      testonly = true
+      visibility = [ ":*" ]
+      sources = [ invoker.test_params ]
+      outputs = [ "${_test_dir}/${invoker.test_params}" ]
+    }
+  }
+
+  #
+  # Define FFX binary host_test_data().
+  #
+  _host_test_data_ffx = "${target_name}_test_data_ffx"
+  host_test_data(_host_test_data_ffx) {
+    testonly = true
+    visibility = [ ":*" ]
+    sources = [ get_label_info("//src/developer/ffx", "root_out_dir") + "/ffx" ]
+    outputs = [ "${_test_dir}/ffx" ]
+    deps = [ "//src/developer/ffx:ffx_bin($host_toolchain)" ]
+  }
+
+  #
+  # Define the antlion host_test() using antlion-runner.
+  #
+  host_test(target_name) {
+    forward_variables_from(invoker,
+                           [
+                             "environments",
+                             "visibility",
+                           ])
+
+    binary_path = "${root_out_dir}/antlion-runner"
+
+    args = [
+      "--python-bin",
+      rebase_path(python_exe_src, root_build_dir),
+      "--antlion-pyz",
+      rebase_path("${_test_dir}/${_python_binary_name}", root_build_dir),
+      "--out-dir",
+      rebase_path("${_test_dir}", root_build_dir),
+      "--ffx-binary",
+      rebase_path("${_test_dir}/ffx", root_build_dir),
+      "--ffx-subtools-search-path",
+      rebase_path(host_tools_dir, root_build_dir),
+      "--ssh-binary",
+      rebase_path("${_test_dir}/ssh", root_build_dir),
+    ]
+
+    if (defined(invoker.test_cases)) {
+      args += invoker.test_cases
+    }
+
+    if (defined(invoker.enable_honeydew) && invoker.enable_honeydew) {
+      args += ["--enable-honeydew"]
+    }
+
+    data_deps = [ "//src/developer/ffx:suite_test_data" ]
+
+    deps = [
+      ":${_host_test_data_ffx}",
+      ":${_host_test_data_ssh}",
+      ":${_host_test_data_target}",
+      "//build/python:interpreter",
+      "//third_party/antlion/runner",
+    ]
+
+    if (defined(invoker.test_params)) {
+      args += [
+        "--test-params",
+        rebase_path("${_test_dir}/${invoker.test_params}", root_build_dir),
+      ]
+      deps += [ ":${_host_test_data_test_params}" ]
+    }
+
+    if (defined(invoker.enable_honeydew) && invoker.enable_honeydew) {
+      deps += [ "//src/testing/end_to_end/honeydew" ]
+    }
+
+    if (defined(invoker.test_data_deps)) {
+      deps += invoker.test_data_deps
+    }
+  }
+}
diff --git a/environments.gni b/environments.gni
new file mode 100644
index 0000000..d19b903
--- /dev/null
+++ b/environments.gni
@@ -0,0 +1,188 @@
+# Copyright 2023 The Fuchsia Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/testing/environments.gni")
+
+astro_ap_env = {
+  dimensions = {
+    access_points = "1"
+    device_type = "Astro"
+    pool = "fuchsia.tests.connectivity"
+  }
+  tags = [ "antlion" ]
+}
+
+astro_ap_iperf_env = {
+  dimensions = {
+    access_points = "1"
+    device_type = "Astro"
+    iperf_servers = "1"
+    pool = "fuchsia.tests.connectivity"
+  }
+  tags = [ "antlion" ]
+}
+
+astro_ap_iperf_attenuator_env = {
+  dimensions = {
+    access_points = "1"
+    attenuators = "1"
+    device_type = "Astro"
+    iperf_servers = "1"
+    pool = "fuchsia.tests.connectivity"
+  }
+  tags = [ "antlion" ]
+}
+
+sherlock_ap_env = {
+  dimensions = {
+    access_points = "1"
+    device_type = "Sherlock"
+    pool = "fuchsia.tests.connectivity"
+  }
+  tags = [ "antlion" ]
+}
+
+sherlock_ap_iperf_env = {
+  dimensions = {
+    access_points = "1"
+    device_type = "Sherlock"
+    iperf_servers = "1"
+    pool = "fuchsia.tests.connectivity"
+  }
+  tags = [ "antlion" ]
+}
+
+sherlock_ap_iperf_attenuator_env = {
+  dimensions = {
+    access_points = "1"
+    attenuators = "1"
+    device_type = "Sherlock"
+    iperf_servers = "1"
+    pool = "fuchsia.tests.connectivity"
+  }
+  tags = [ "antlion" ]
+}
+
+nelson_ap_env = {
+  dimensions = {
+    access_points = "1"
+    device_type = "Nelson"
+    pool = "fuchsia.tests.connectivity"
+  }
+  tags = [ "antlion" ]
+}
+
+nelson_ap_iperf_env = {
+  dimensions = {
+    access_points = "1"
+    device_type = "Nelson"
+    iperf_servers = "1"
+    pool = "fuchsia.tests.connectivity"
+  }
+  tags = [ "antlion" ]
+}
+
+nelson_ap_iperf_attenuator_env = {
+  dimensions = {
+    access_points = "1"
+    attenuators = "1"
+    device_type = "Nelson"
+    iperf_servers = "1"
+    pool = "fuchsia.tests.connectivity"
+  }
+  tags = [ "antlion" ]
+}
+
+nuc11_ap_env = {
+  dimensions = {
+    access_points = "1"
+    device_type = "Intel NUC Kit NUC11TNHv5"
+    pool = "fuchsia.tests.connectivity"
+  }
+  tags = [ "antlion" ]
+}
+
+nuc11_ap_iperf_env = {
+  dimensions = {
+    access_points = "1"
+    device_type = "Intel NUC Kit NUC11TNHv5"
+    iperf_servers = "1"
+    pool = "fuchsia.tests.connectivity"
+  }
+  tags = [ "antlion" ]
+}
+
+nuc11_ap_iperf_attenuator_env = {
+  dimensions = {
+    access_points = "1"
+    attenuators = "1"
+    device_type = "Intel NUC Kit NUC11TNHv5"
+    iperf_servers = "1"
+    pool = "fuchsia.tests.connectivity"
+  }
+  tags = [ "antlion" ]
+}
+
+vim3_ap_env = {
+  dimensions = {
+    access_points = "1"
+    device_type = "Vim3"
+    pool = "fuchsia.tests.connectivity"
+  }
+  tags = [ "antlion" ]
+}
+
+vim3_ap_iperf_env = {
+  dimensions = {
+    access_points = "1"
+    device_type = "Vim3"
+    iperf_servers = "1"
+    pool = "fuchsia.tests.connectivity"
+  }
+  tags = [ "antlion" ]
+}
+
+vim3_ap_iperf_attenuator_env = {
+  dimensions = {
+    access_points = "1"
+    attenuators = "1"
+    device_type = "Vim3"
+    iperf_servers = "1"
+    pool = "fuchsia.tests.connectivity"
+  }
+  tags = [ "antlion" ]
+}
+
+# Display environments supported by antlion.
+display_envs = [
+  astro_env,
+  sherlock_env,
+  nelson_env,
+  nuc11_env,
+  vim3_env,
+]
+
+display_ap_envs = [
+  astro_ap_env,
+  sherlock_ap_env,
+  nelson_ap_env,
+  nuc11_ap_env,
+  vim3_ap_env,
+]
+
+display_ap_iperf_envs = [
+  astro_ap_iperf_env,
+  sherlock_ap_iperf_env,
+  nelson_ap_iperf_env,
+  nuc11_ap_iperf_env,
+  vim3_ap_iperf_env,
+]
+
+display_ap_iperf_attenuator_envs = [
+  astro_ap_iperf_attenuator_env,
+  sherlock_ap_iperf_attenuator_env,
+  nelson_ap_iperf_attenuator_env,
+  nuc11_ap_iperf_attenuator_env,
+  vim3_ap_iperf_attenuator_env,
+]
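+
+# Example usage (illustrative target name; `environments` is forwarded to
+# host_test() by the antlion_host_test() template):
+#
+#   antlion_host_test("wlan_example_test") {
+#     main_source = "WlanExampleTest.py"
+#     environments = display_ap_envs
+#   }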
diff --git a/format.sh b/format.sh
new file mode 100755
index 0000000..d6341f1
--- /dev/null
+++ b/format.sh
@@ -0,0 +1,46 @@
+#!/bin/bash
+
+# Detect trivial unused code.
+#
+# Automatic removal is possible, but is considered an unsafe operation. When a
+# change hasn't been committed, automatic removal could cause unintended,
+# irreversible loss of in-progress code.
+#
+# Note: This cannot detect unused code between modules or packages. For complex unused
+# code detection, vulture should be used.
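+# (For example: `pip install vulture && vulture .`)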
+autoflake \
+	--quiet \
+	--check-diff \
+	--remove-duplicate-keys \
+	--remove-unused-variables \
+	--remove-all-unused-imports \
+	--recursive .
+
+if [ $? -eq 0 ]; then
+	echo "No unused code found"
+else
+	echo ""
+	echo "====================="
+	echo "Unused code detected!"
+	echo "====================="
+	echo ""
+	echo "If these changes are trivial, consider running:"
+	echo "\"autoflake --in-place --remove-unused-variables --remove-all-unused-imports -r .\""
+	echo ""
+	read -p "Run this command to remove all unused code? [y/n] " -n 1 -r
+	echo ""
+	echo ""
+
+	if [[ $REPLY =~ ^[Yy]$ ]]; then
+		autoflake --in-place --remove-unused-variables --remove-all-unused-imports -r .
+	else
+		exit 1
+	fi
+fi
+
+# Sort imports to avoid bikeshedding.
+isort .
+
+# Format code; also to avoid bikeshedding.
+black .
+
diff --git a/src/antlion/__init__.py b/packages/antlion/__init__.py
similarity index 100%
rename from src/antlion/__init__.py
rename to packages/antlion/__init__.py
diff --git a/packages/antlion/base_test.py b/packages/antlion/base_test.py
new file mode 100755
index 0000000..4d2206c
--- /dev/null
+++ b/packages/antlion/base_test.py
@@ -0,0 +1,1027 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import fnmatch
+import functools
+import importlib
+import inspect
+import logging
+import os
+import re
+import traceback
+from concurrent.futures import ThreadPoolExecutor
+from typing import Callable
+
+from mobly import asserts
+from mobly.base_test import BaseTestClass as MoblyBaseTest
+from mobly.base_test import Error as MoblyError
+from mobly.records import ExceptionRecord
+
+from antlion import error, keys, logger, records, signals, tracelogger, utils
+from antlion.controllers.fuchsia_device import FuchsiaDevice
+from antlion.event import event_bus, subscription_bundle
+from antlion.event.decorators import subscribe_static
+from antlion.event.event import (
+    TestCaseBeginEvent,
+    TestCaseEndEvent,
+    TestClassBeginEvent,
+    TestClassEndEvent,
+)
+from antlion.event.subscription_bundle import SubscriptionBundle
+
+# Macro strings for test result reporting
+TEST_CASE_TOKEN = "[Test Case]"
+RESULT_LINE_TEMPLATE = f"{TEST_CASE_TOKEN} %s %s"
+
+
+@subscribe_static(TestCaseBeginEvent)
+def _logcat_log_test_begin(event):
+    """Ensures that logcat is running. Write a logcat line indicating test case
+    begin."""
+    test_instance = event.test_class
+    try:
+        for ad in getattr(test_instance, "android_devices", []):
+            if not ad.is_adb_logcat_on:
+                ad.start_adb_logcat()
+            # Write test start token to adb log if android device is attached.
+            if not ad.skip_sl4a and ad.droid:
+                ad.droid.logV(f"{TEST_CASE_TOKEN} BEGIN {event.test_case_name}")
+
+    except error.ActsError as e:
+        test_instance.results.error.append(
+            ExceptionRecord(e, f"Logcat for test begin: {event.test_case_name}")
+        )
+        test_instance.log.error(f"BaseTest setup_test error: {e.details}")
+    except Exception as e:
+        test_instance.log.warning("Unable to send BEGIN log command to all devices.")
+        test_instance.log.warning(f"Error: {e}")
+
+
+@subscribe_static(TestCaseEndEvent)
+def _logcat_log_test_end(event):
+    """Write a logcat line indicating test case end."""
+    test_instance = event.test_class
+    try:
+        # Write test end token to adb log if android device is attached.
+        for ad in getattr(test_instance, "android_devices", []):
+            if not ad.skip_sl4a and ad.droid:
+                ad.droid.logV(f"{TEST_CASE_TOKEN} END {event.test_case_name}")
+
+    except error.ActsError as e:
+        test_instance.results.error.append(
+            ExceptionRecord(e, f"Logcat for test end: {event.test_case_name}")
+        )
+        test_instance.log.error(f"BaseTest teardown_test error: {e.details}")
+    except Exception as e:
+        test_instance.log.warning("Unable to send END log command to all devices.")
+        test_instance.log.warning(f"Error: {e}")
+
+
+@subscribe_static(TestCaseBeginEvent)
+def _syslog_log_test_begin(event):
+    """This adds a BEGIN log message with the test name to the syslog of any
+    Fuchsia device"""
+    test_instance = event.test_class
+    try:
+        fd: FuchsiaDevice
+        for fd in getattr(test_instance, "fuchsia_devices", []):
+            if hasattr(fd, "_sl4f"):
+                fd.sl4f.logging_lib.logI(
+                    f"{TEST_CASE_TOKEN} BEGIN {event.test_case_name}"
+                )
+
+    except Exception as e:
+        test_instance.log.warning("Unable to send BEGIN log command to all devices.")
+        test_instance.log.warning(f"Error: {e}")
+
+
+@subscribe_static(TestCaseEndEvent)
+def _syslog_log_test_end(event):
+    """This adds a END log message with the test name to the syslog of any
+    Fuchsia device"""
+    test_instance = event.test_class
+    try:
+        fd: FuchsiaDevice
+        for fd in getattr(test_instance, "fuchsia_devices", []):
+            if hasattr(fd, "_sl4f"):
+                fd.sl4f.logging_lib.logI(
+                    f"{TEST_CASE_TOKEN} END {event.test_case_name}"
+                )
+
+    except Exception as e:
+        test_instance.log.warning("Unable to send END log command to all devices.")
+        test_instance.log.warning(f"Error: {e}")
+
+
+event_bus.register_subscription(_logcat_log_test_begin.subscription)
+event_bus.register_subscription(_logcat_log_test_end.subscription)
+event_bus.register_subscription(_syslog_log_test_begin.subscription)
+event_bus.register_subscription(_syslog_log_test_end.subscription)
+
+
+class Error(Exception):
+    """Raised for exceptions that occured in BaseTestClass."""
+
+
+class BaseTestClass(MoblyBaseTest):
+    """Base class for all test classes to inherit from. Inherits some
+    functionality from Mobly's base test class.
+
+    This class gets all the controller objects from test_runner and executes
+    the test cases requested within itself.
+
+    Most attributes of this class are set at runtime based on the configuration
+    provided.
+
+    Attributes:
+        tests: A list of strings, each representing a test case name.
+        TAG: A string used to refer to a test class. Default is the test class
+             name.
+        log: A logger object used for logging.
+        results: A records.TestResult object for aggregating test results from
+                 the execution of test cases.
+        controller_configs: A dict of controller configs provided by the user
+                            via the testbed config.
+        consecutive_failures: Tracks the number of consecutive test case
+                              failures within this class.
+        consecutive_failure_limit: Number of consecutive test failures to allow
+                                   before blocking remaining tests in the same
+                                   test class.
+        size_limit_reached: True if the size of the log directory has reached
+                            its limit.
+        current_test_name: A string that's the name of the test case currently
+                           being executed. If no test is executing, this should
+                           be None.
+    """
+
+    TAG = None
+
+    def __init__(self, configs):
+        """Initializes a BaseTestClass given a TestRunConfig, which provides
+        all of the config information for this test class.
+
+        Args:
+            configs: A config_parser.TestRunConfig object.
+        """
+        super().__init__(configs)
+
+        self.__handle_file_user_params()
+
+        self.class_subscriptions = SubscriptionBundle()
+        self.class_subscriptions.register()
+        self.all_subscriptions = [self.class_subscriptions]
+
+        self.current_test_name = None
+        self.log = tracelogger.TraceLogger(logging.getLogger())
+        # TODO: remove after converging log path definitions with mobly
+        self.log_path = configs.log_path
+
+        self.consecutive_failures = 0
+        self.consecutive_failure_limit = self.user_params.get(
+            "consecutive_failure_limit", -1
+        )
+        self.size_limit_reached = False
+        self.retryable_exceptions = signals.TestFailure
+
+    def _import_builtin_controllers(self):
+        """Import built-in controller modules.
+
+        Go through the testbed configs, find any built-in controller configs
+        and import the corresponding controller module from antlion.controllers
+        package.
+
+        Returns:
+            A list of controller modules.
+        """
+        builtin_controllers = []
+        for ctrl_name in keys.Config.builtin_controller_names.value:
+            if ctrl_name in self.controller_configs:
+                module_name = keys.get_module_name(ctrl_name)
+                module = importlib.import_module(f"antlion.controllers.{module_name}")
+                builtin_controllers.append(module)
+        return builtin_controllers
+
+    def __handle_file_user_params(self):
+        """For backwards compatibility, moves all contents of the "files" dict
+        into the root level of user_params.
+
+        This allows existing tests to run with the new Mobly-style format
+        without needing to make changes.
+        """
+        for key, value in self.user_params.items():
+            if key.endswith("files") and isinstance(value, dict):
+                new_user_params = dict(value)
+                new_user_params.update(self.user_params)
+                self.user_params = new_user_params
+                break
+
+    @staticmethod
+    def get_module_reference_name(a_module):
+        """Returns the module's reference name.
+
+        This is largely for backwards compatibility with log parsing. If the
+        module defines ACTS_CONTROLLER_REFERENCE_NAME, that value is returned;
+        otherwise, the module's submodule name is returned.
+
+        Args:
+            a_module: Any module. Ideally, a controller module.
+        Returns:
+            A string corresponding to the module's name.
+        """
+        if hasattr(a_module, "ACTS_CONTROLLER_REFERENCE_NAME"):
+            return a_module.ACTS_CONTROLLER_REFERENCE_NAME
+        else:
+            return a_module.__name__.split(".")[-1]
+
+    def register_controller(self, controller_module, required=True, builtin=False):
+        """Registers an ACTS controller module for a test class. Invokes Mobly's
+        implementation of register_controller.
+
+        An ACTS controller module is a Python lib that can be used to control
+        a device, service, or equipment. To be ACTS compatible, a controller
+        module needs to have the following members:
+
+            def create(configs):
+                [Required] Creates controller objects from configurations.
+                Args:
+                    configs: A list of serialized data like string/dict. Each
+                             element of the list is a configuration for a
+                             controller object.
+                Returns:
+                    A list of objects.
+
+            def destroy(objects):
+                [Required] Destroys controller objects created by the create
+                function. Each controller object shall be properly cleaned up
+                and all the resources held should be released, e.g. memory
+                allocation, sockets, file handlers etc.
+                Args:
+                    A list of controller objects created by the create function.
+
+            def get_info(objects):
+                [Optional] Gets info from the controller objects used in a test
+                run. The info will be included in test_result_summary.json under
+                the key "ControllerInfo". Such information could include unique
+                ID, version, or anything that could be useful for describing the
+                test bed and debugging.
+                Args:
+                    objects: A list of controller objects created by the create
+                             function.
+                Returns:
+                    A list of json serializable objects, each represents the
+                    info of a controller object. The order of the info object
+                    should follow that of the input objects.
+        Registering a controller module declares a test class's dependency on
+        the controller. If the module config exists and the module matches the
+        controller interface, controller objects will be instantiated with
+        corresponding configs. The module should be imported first.
+
+        Args:
+            controller_module: A module that follows the controller module
+                interface.
+            required: A bool. If True, failing to register the specified
+                controller module raises exceptions. If False, returns None upon
+                failures.
+            builtin: Specifies that the module is a built-in controller module
+                in ACTS. If True, the controller objects are set as an
+                attribute of the test class.
+        Returns:
+            A list of controller objects instantiated from controller_module, or
+            None.
+
+        Raises:
+            When required is True, ControllerError is raised if no corresponding
+            config can be found.
+            Regardless of the value of "required", ControllerError is raised if
+            the controller module has already been registered or any other error
+            occurred in the registration process.
+        """
+        module_ref_name = self.get_module_reference_name(controller_module)
+        module_config_name = controller_module.MOBLY_CONTROLLER_CONFIG_NAME
+
+        # Get controller objects from Mobly's register_controller
+        controllers = self._controller_manager.register_controller(
+            controller_module, required=required
+        )
+        if not controllers:
+            return None
+
+        # Log controller information
+        # Implementation of "get_info" is optional for a controller module.
+        if hasattr(controller_module, "get_info"):
+            controller_info = controller_module.get_info(controllers)
+            self.log.info("Controller %s: %s", module_config_name, controller_info)
+
+        if builtin:
+            setattr(self, module_ref_name, controllers)
+        return controllers
+
+    def _setup_class(self):
+        """Proxy function to guarantee the base implementation of setup_class
+        is called.
+        """
+        event_bus.post(TestClassBeginEvent(self))
+        # Import and register the built-in controller modules specified
+        # in testbed config.
+        for module in self._import_builtin_controllers():
+            self.register_controller(module, builtin=True)
+        return self.setup_class()
+
+    def _teardown_class(self):
+        """Proxy function to guarantee the base implementation of teardown_class
+        is called.
+        """
+        super()._teardown_class()
+        event_bus.post(TestClassEndEvent(self, self.results))
+
+    def _setup_test(self, test_name):
+        """Proxy function to guarantee the base implementation of setup_test is
+        called.
+        """
+        self.current_test_name = test_name
+
+        # Skip the test if the consecutive test case failure limit is reached.
+        if self.consecutive_failures == self.consecutive_failure_limit:
+            raise signals.TestError("Consecutive test failure")
+
+        return self.setup_test()
+
+    def setup_test(self):
+        """Setup function that will be called every time before executing each
+        test case in the test class.
+
+        To signal setup failure, return False or raise an exception. If an
+        exception is raised, the stack trace will appear in the log, but the
+        exception will not propagate to upper levels.
+
+        Implementation is optional.
+        """
+        return True
+
+    def _teardown_test(self, test_name):
+        """Proxy function to guarantee the base implementation of teardown_test
+        is called.
+        """
+        self.log.debug(f"Tearing down test {test_name}")
+        self.teardown_test()
+
+    def _on_fail(self, record):
+        """Proxy function to guarantee the base implementation of on_fail is
+        called.
+
+        Args:
+            record: The records.TestResultRecord object for the failed test
+                    case.
+        """
+        self.consecutive_failures += 1
+        if record.details:
+            self.log.error(record.details)
+        self.log.info(RESULT_LINE_TEMPLATE, record.test_name, record.result)
+        self.on_fail(record.test_name, record.begin_time)
+
+    def on_fail(self, test_name, begin_time):
+        """A function that is executed upon a test case failure.
+
+        User implementation is optional.
+
+        Args:
+            test_name: Name of the test that triggered this function.
+            begin_time: Logline format timestamp taken when the test started.
+        """
+
+    def _on_pass(self, record):
+        """Proxy function to guarantee the base implementation of on_pass is
+        called.
+
+        Args:
+            record: The records.TestResultRecord object for the passed test
+                    case.
+        """
+        self.consecutive_failures = 0
+        msg = record.details
+        if msg:
+            self.log.info(msg)
+        self.log.info(RESULT_LINE_TEMPLATE, record.test_name, record.result)
+        self.on_pass(record.test_name, record.begin_time)
+
+    def on_pass(self, test_name, begin_time):
+        """A function that is executed upon a test case passing.
+
+        Implementation is optional.
+
+        Args:
+            test_name: Name of the test that triggered this function.
+            begin_time: Logline format timestamp taken when the test started.
+        """
+
+    def _on_skip(self, record):
+        """Proxy function to guarantee the base implementation of on_skip is
+        called.
+
+        Args:
+            record: The records.TestResultRecord object for the skipped test
+                    case.
+        """
+        self.log.info(RESULT_LINE_TEMPLATE, record.test_name, record.result)
+        self.log.info("Reason to skip: %s", record.details)
+        self.on_skip(record.test_name, record.begin_time)
+
+    def on_skip(self, test_name, begin_time):
+        """A function that is executed upon a test case being skipped.
+
+        Implementation is optional.
+
+        Args:
+            test_name: Name of the test that triggered this function.
+            begin_time: Logline format timestamp taken when the test started.
+        """
+
+    def _on_exception(self, record):
+        """Proxy function to guarantee the base implementation of on_exception
+        is called.
+
+        Args:
+            record: The records.TestResultRecord object for the failed test
+                    case.
+        """
+        self.log.exception(record.details)
+        self.on_exception(record.test_name, record.begin_time)
+
+    def on_exception(self, test_name, begin_time):
+        """A function that is executed upon an unhandled exception from a test
+        case.
+
+        Implementation is optional.
+
+        Args:
+            test_name: Name of the test that triggered this function.
+            begin_time: Logline format timestamp taken when the test started.
+        """
+
+    def on_retry(self):
+        """Function to run before retrying a test through get_func_with_retry.
+
+        This function runs when a test is automatically retried. The function
+        can be used to modify internal test parameters, for example, to retry
+        a test with slightly different input variables.
+        """
+
+    def _exec_procedure_func(self, func, tr_record):
+        """Executes a procedure function like on_pass, on_fail etc.
+
+        This function will alter the 'Result' of the test's record if an
+        exception occurs while executing the procedure function.
+
+        This will let signals.TestAbortAll through so abort_all works in all
+        procedure functions.
+
+        Args:
+            func: The procedure function to be executed.
+            tr_record: The TestResultRecord object associated with the test
+                       case executed.
+        """
+        try:
+            func(tr_record)
+        except signals.TestAbortAll:
+            raise
+        except Exception as e:
+            self.log.exception(
+                "Exception happened when executing %s for %s.",
+                func.__name__,
+                self.current_test_name,
+            )
+            tr_record.add_error(func.__name__, e)
+
+    def exec_one_testcase(self, test_name, test_func):
+        """Executes one test case and update test results.
+
+        Executes one test case, creates a records.TestResultRecord object with
+        the execution information, and adds the record to the test class's test
+        results.
+
+        Args:
+            test_name: Name of the test.
+            test_func: The test function.
+        """
+        class_name = self.__class__.__name__
+        tr_record = records.TestResultRecord(test_name, class_name)
+        tr_record.test_begin()
+        self.begin_time = int(tr_record.begin_time)
+        self.log_begin_time = tr_record.log_begin_time
+        self.test_name = tr_record.test_name
+        event_bus.post(TestCaseBeginEvent(self, self.test_name))
+        self.log.info("%s %s", TEST_CASE_TOKEN, test_name)
+
+        # Enable test retry if specified in the ACTS config
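+        # (e.g., a hypothetical user_params entry such as
+        #   "retry_tests": ["WlanScanTest", "WlanScanTest.test_scan"]
+        # enables retry for a whole class or for a single test case; the names
+        # above are illustrative only)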
+        retry_tests = self.user_params.get("retry_tests", [])
+        full_test_name = f"{class_name}.{self.test_name}"
+        if any(name in retry_tests for name in [class_name, full_test_name]):
+            test_func = self.get_func_with_retry(test_func)
+
+        verdict = None
+        test_signal = None
+        try:
+            try:
+                ret = self._setup_test(self.test_name)
+                asserts.assert_true(ret is not False, f"Setup for {test_name} failed.")
+                verdict = test_func()
+            finally:
+                try:
+                    self._teardown_test(self.test_name)
+                except signals.TestAbortAll:
+                    raise
+                except Exception as e:
+                    self.log.error(traceback.format_exc())
+                    tr_record.add_error("teardown_test", e)
+        except (signals.TestFailure, AssertionError) as e:
+            test_signal = e
+            if self.user_params.get(
+                keys.Config.key_test_failure_tracebacks.value, False
+            ):
+                self.log.exception(e)
+            tr_record.test_fail(e)
+        except signals.TestSkip as e:
+            # Test skipped.
+            test_signal = e
+            tr_record.test_skip(e)
+        except (signals.TestAbortClass, signals.TestAbortAll) as e:
+            # Abort signals, pass along.
+            test_signal = e
+            tr_record.test_fail(e)
+            raise e
+        except signals.TestPass as e:
+            # Explicit test pass.
+            test_signal = e
+            tr_record.test_pass(e)
+        except Exception as e:
+            test_signal = e
+            self.log.error(traceback.format_exc())
+            # Exception happened during test.
+            tr_record.test_error(e)
+        else:
+            if verdict or (verdict is None):
+                # Test passed.
+                tr_record.test_pass()
+                return
+            tr_record.test_fail()
+        finally:
+            tr_record.update_record()
+            try:
+                # Execute post-test procedures
+                result = tr_record.result
+                if result == records.TestResultEnums.TEST_RESULT_PASS:
+                    self._exec_procedure_func(self._on_pass, tr_record)
+                elif result == records.TestResultEnums.TEST_RESULT_FAIL:
+                    self._exec_procedure_func(self._on_fail, tr_record)
+                elif result == records.TestResultEnums.TEST_RESULT_SKIP:
+                    self._exec_procedure_func(self._on_skip, tr_record)
+                elif result == records.TestResultEnums.TEST_RESULT_ERROR:
+                    self._exec_procedure_func(self._on_exception, tr_record)
+                    self._exec_procedure_func(self._on_fail, tr_record)
+            finally:
+                self.results.add_record(tr_record)
+                self.summary_writer.dump(
+                    tr_record.to_dict(), records.TestSummaryEntryType.RECORD
+                )
+                self.current_test_name = None
+                event_bus.post(TestCaseEndEvent(self, self.test_name, test_signal))
+
+    def get_func_with_retry(self, func, attempts=2):
+        """Returns a wrapped test method that re-runs after failure. Return test
+        result upon success. If attempt limit reached, collect all failure
+        messages and raise a TestFailure signal.
+
+        Params:
+            func: The test method
+            attempts: Number of attempts to run test
+
+        Returns: result of the test method
+        """
+        exceptions = self.retryable_exceptions
+
+        def wrapper(*args, **kwargs):
+            error_msgs = []
+            extras = {}
+            retry = False
+            for i in range(attempts):
+                try:
+                    if retry:
+                        self.teardown_test()
+                        self.setup_test()
+                        self.on_retry()
+                    return func(*args, **kwargs)
+                except exceptions as e:
+                    retry = True
+                    msg = f"Failure on attempt {i + 1}: {e.details}"
+                    self.log.warning(msg)
+                    error_msgs.append(msg)
+                    if e.extras:
+                        extras[f"Attempt {i + 1}"] = e.extras
+            raise signals.TestFailure("\n".join(error_msgs), extras)
+
+        return wrapper
+
+    def run_generated_testcases(
+        self,
+        test_func,
+        settings,
+        args=None,
+        kwargs=None,
+        tag="",
+        name_func=None,
+        format_args=False,
+    ):
+        """Deprecated. Please use pre_run and generate_tests.
+
+        Generated test cases are not written down as functions, but as a list
+        of parameter sets. This way we reduce code repetition and improve
+        test case scalability.
+
+        Args:
+            test_func: The common logic shared by all these generated test
+                       cases. This function should take at least one argument,
+                       which is a parameter set.
+            settings: A list of strings representing parameter sets. These are
+                      usually json strings that get loaded in the test_func.
+            args: Iterable of additional positional args to be passed to
+                  test_func.
+            kwargs: Dict of additional keyword args to be passed to test_func.
+            tag: Name of this group of generated test cases. Ignored if
+                 name_func is provided and operates properly.
+            name_func: A function that takes a test setting and generates a
+                       proper test name. The test name should be shorter than
+                       utils.MAX_FILENAME_LEN. Names over the limit will be
+                       truncated.
+            format_args: If True, the setting is appended after args instead
+                         of being passed as the first positional argument to
+                         test_func.
+
+        Returns:
+            A list of settings that did not pass.
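+
+        Example (illustrative sketch; the test method, settings, and name
+        format are hypothetical):
+
+            self.run_generated_testcases(
+                test_func=self.verify_channel,
+                settings=["1", "6", "11"],
+                name_func=lambda setting: f"test_channel_{setting}",
+            )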
+        """
+        args = args or ()
+        kwargs = kwargs or {}
+        failed_settings = []
+
+        for setting in settings:
+            test_name = f"{tag} {setting}"
+
+            if name_func:
+                try:
+                    test_name = name_func(setting, *args, **kwargs)
+                except:
+                    self.log.exception(
+                        (
+                            "Failed to get test name from "
+                            "test_func. Fall back to default %s"
+                        ),
+                        test_name,
+                    )
+
+            self.results.requested.append(test_name)
+
+            if len(test_name) > utils.MAX_FILENAME_LEN:
+                test_name = test_name[: utils.MAX_FILENAME_LEN]
+
+            previous_success_cnt = len(self.results.passed)
+
+            if format_args:
+                self.exec_one_testcase(
+                    test_name,
+                    functools.partial(test_func, *(args + (setting,)), **kwargs),
+                )
+            else:
+                self.exec_one_testcase(
+                    test_name,
+                    functools.partial(test_func, *((setting,) + args), **kwargs),
+                )
+
+            if len(self.results.passed) - previous_success_cnt != 1:
+                failed_settings.append(setting)
+
+        return failed_settings
+
+    def _exec_func(self, func, *args):
+        """Executes a function with exception safeguard.
+
+        This will let signals.TestAbortAll through so abort_all works in all
+        procedure functions.
+
+        Args:
+            func: Function to be executed.
+            args: Arguments to be passed to the function.
+
+        Returns:
+            Whatever the function returns, or False if an unhandled exception
+            occurred.
+        """
+        try:
+            return func(*args)
+        except signals.TestAbortAll:
+            raise
+        except:
+            self.log.exception(
+                "Exception happened when executing %s in %s.", func.__name__, self.TAG
+            )
+            return False
+
+    def _block_all_test_cases(self, tests, reason="Failed class setup"):
+        """
+        Block all passed in test cases.
+        Args:
+            tests: The tests to block.
+            reason: Message describing the reason that the tests are blocked.
+                Default is 'Failed class setup'
+        """
+        for test_name, test_func in tests:
+            signal = signals.TestError(reason)
+            record = records.TestResultRecord(test_name, self.TAG)
+            record.test_begin()
+            if hasattr(test_func, "gather"):
+                signal.extras = test_func.gather()
+            record.test_error(signal)
+            self.results.add_record(record)
+            self.summary_writer.dump(
+                record.to_dict(), records.TestSummaryEntryType.RECORD
+            )
+            self._on_skip(record)
+
+    def run(self, test_names=None):
+        """Runs test cases within a test class by the order they appear in the
+        execution list.
+
+        One of these test cases lists will be executed, shown here in priority
+        order:
+        1. The test_names list, which is passed from cmd line.
+        2. The self.tests list defined in test class. Invalid names are
+           ignored.
+        3. All function that matches test case naming convention in the test
+           class.
+
+        Args:
+            test_names: A list of string that are test case names/patterns
+             requested in cmd line.
+
+        Returns:
+            The test results object of this class.
+        """
+        # Executes pre-setup procedures, like generating test methods.
+        if not self._pre_run():
+            return self.results
+
+        self.register_test_class_event_subscriptions()
+        self.log.info("==========> %s <==========", self.TAG)
+        # Devise the actual test cases to run in the test class.
+        if self.tests:
+            # Specified by run list in class.
+            valid_tests = list(self.tests)
+        else:
+            # No test case specified by user, gather the run list automatically.
+            valid_tests = self.get_existing_test_names()
+        if test_names:
+            # Match test cases with any of the user-specified patterns
+            matches = []
+            for test_name in test_names:
+                for valid_test in valid_tests:
+                    if (
+                        fnmatch.fnmatch(valid_test, test_name)
+                        and valid_test not in matches
+                    ):
+                        matches.append(valid_test)
+        else:
+            matches = valid_tests
+        self.results.requested = matches
+        self.summary_writer.dump(
+            self.results.requested_test_names_dict(),
+            records.TestSummaryEntryType.TEST_NAME_LIST,
+        )
+        tests = self._get_test_methods(matches)
+
+        # Setup for the class.
+        setup_fail = False
+        try:
+            if self._setup_class() is False:
+                self.log.error("Failed to setup %s.", self.TAG)
+                self._block_all_test_cases(tests)
+                setup_fail = True
+        except signals.TestAbortClass:
+            self.log.exception(f"Test class {self.TAG} aborted")
+            setup_fail = True
+        except Exception as e:
+            self.log.exception(f"Failed to setup {self.TAG}: {e}")
+            self._block_all_test_cases(tests)
+            setup_fail = True
+        if setup_fail:
+            self._exec_func(self._teardown_class)
+            self.log.info(
+                "Summary for test class %s: %s", self.TAG, self.results.summary_str()
+            )
+            return self.results
+
+        # Run tests in order.
+        test_case_iterations = self.user_params.get(
+            keys.Config.key_test_case_iterations.value, 1
+        )
+        if any(
+            [
+                substr in self.__class__.__name__
+                for substr in ["Preflight", "Postflight"]
+            ]
+        ):
+            test_case_iterations = 1
+        try:
+            for test_name, test_func in tests:
+                for _ in range(test_case_iterations):
+                    self.exec_one_testcase(test_name, test_func)
+            return self.results
+        except signals.TestAbortClass:
+            self.log.exception(f"Test class {self.TAG} aborted")
+            return self.results
+        except signals.TestAbortAll as e:
+            # Piggy-back test results on this exception object so we don't lose
+            # results from this test class.
+            setattr(e, "results", self.results)
+            raise e
+        finally:
+            self._exec_func(self._teardown_class)
+            self.log.info(
+                "Summary for test class %s: %s", self.TAG, self.results.summary_str()
+            )
+
+    def _ad_take_bugreport(self, ad, test_name, begin_time):
+        for i in range(3):
+            try:
+                ad.take_bug_report(test_name, begin_time)
+                return True
+            except Exception as e:
+                ad.log.error("bugreport attempt %s error: %s", i + 1, e)
+
+    def _ad_take_extra_logs(self, ad, test_name, begin_time):
+        result = True
+        if getattr(ad, "qxdm_log", False):
+            # Gather QXDM logs modified up to 3 minutes before the test start time
+            if begin_time:
+                qxdm_begin_time = begin_time - 1000 * 60 * 3
+            else:
+                qxdm_begin_time = None
+            try:
+                ad.get_qxdm_logs(test_name, qxdm_begin_time)
+            except Exception as e:
+                ad.log.error(
+                    "Failed to get QXDM log for %s with error %s", test_name, e
+                )
+                result = False
+
+        try:
+            ad.check_crash_report(test_name, begin_time, log_crash_report=True)
+        except Exception as e:
+            ad.log.error(
+                "Failed to check crash report for %s with error %s", test_name, e
+            )
+            result = False
+        return result
+
+    def _skip_bug_report(self, test_name):
+        """A function to check whether we should skip creating a bug report.
+
+        Args:
+            test_name: The test case name
+
+        Returns: True if bug report is to be skipped.
+        """
+        if "no_bug_report_on_fail" in self.user_params:
+            return True
+
+        # If the current test class or test case is found in the set of
+        # problematic tests, we skip bugreport and other failure artifact
+        # creation.
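+        # (A hypothetical user_params entry such as
+        #   "quiet_tests": ["WlanScanTest", "WlanScanTest.test_scan"]
+        # silences either a whole class or a single test case; the names are
+        # illustrative only.)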
+        class_name = self.__class__.__name__
+        quiet_tests = self.user_params.get("quiet_tests", [])
+        if class_name in quiet_tests:
+            self.log.info("Skipping bug report, as directed for this test class.")
+            return True
+        full_test_name = f"{class_name}.{test_name}"
+        if full_test_name in quiet_tests:
+            self.log.info("Skipping bug report, as directed for this test case.")
+            return True
+
+        # Once we hit a certain log path size, it's not going to get smaller.
+        # We cache the result so we don't have to keep doing directory walks.
+        if self.size_limit_reached:
+            return True
+        try:
+            max_log_size = int(
+                self.user_params.get("soft_output_size_limit") or "invalid"
+            )
+            log_path = getattr(logging, "log_path", None)
+            if log_path:
+                curr_log_size = utils.get_directory_size(log_path)
+                if curr_log_size > max_log_size:
+                    self.log.info(
+                        "Skipping bug report, as we've reached the size limit."
+                    )
+                    self.size_limit_reached = True
+                    return True
+        except ValueError:
+            pass
+        return False
+
+    def _take_bug_report(self, test_name, begin_time):
+        if self._skip_bug_report(test_name):
+            return
+
+        executor = ThreadPoolExecutor(max_workers=10)
+        for ad in getattr(self, "android_devices", []):
+            executor.submit(self._ad_take_bugreport, ad, test_name, begin_time)
+            executor.submit(self._ad_take_extra_logs, ad, test_name, begin_time)
+        executor.shutdown()
+
+    def _reboot_device(self, ad):
+        ad.log.info("Rebooting device.")
+        ad = ad.reboot()
+
+    def _cleanup_logger_sessions(self):
+        for mylogger, session in self.logger_sessions:
+            self.log.info("Resetting a diagnostic session %s, %s", mylogger, session)
+            mylogger.reset()
+        self.logger_sessions = []
+
+    def _pull_diag_logs(self, test_name, begin_time):
+        for mylogger, session in self.logger_sessions:
+            self.log.info("Pulling diagnostic session %s", mylogger)
+            mylogger.stop(session)
+            diag_path = os.path.join(
+                self.log_path, logger.epoch_to_log_line_timestamp(begin_time)
+            )
+            os.makedirs(diag_path, exist_ok=True)
+            mylogger.pull(session, diag_path)
+
+    def register_test_class_event_subscriptions(self):
+        self.class_subscriptions = subscription_bundle.create_from_instance(self)
+        self.class_subscriptions.register()
+
+    def unregister_test_class_event_subscriptions(self):
+        for package in self.all_subscriptions:
+            package.unregister()
+
+
+class AntlionBaseTest(MoblyBaseTest):
+    # TODO(https://github.com/google/mobly/issues/887): Remove this once similar
+    # functionality is merged into Mobly.
+    def _get_test_methods(
+        self, test_names: list[str]
+    ) -> list[tuple[str, Callable[[], None]]]:
+        """Resolves test method names to bound test methods.
+
+        Args:
+            test_names: Test method names.
+
+        Returns:
+            List of tuples containing the test method name and the function implementing
+            its logic.
+
+        Raises:
+            MoblyError: test_names does not match any tests.
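+
+        Example (illustrative): a test_names entry of "test_scan_.*" is
+        treated as a regular expression and matches every declared or
+        generated test method whose name starts with "test_scan_".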
+        """
+
+        test_table: dict[str, Callable[[], None]] = {**self._generated_test_table}
+        for name, _ in inspect.getmembers(type(self), callable):
+            if name.startswith("test_"):
+                test_table[name] = getattr(self, name)
+
+        test_methods: list[tuple[str, Callable[[], None]]] = []
+        for test_name in test_names:
+            if test_name in test_table:
+                test_methods.append((test_name, test_table[test_name]))
+            else:
+                try:
+                    pattern = re.compile(test_name)
+                except Exception as e:
+                    raise MoblyError(
+                        f'"{test_name}" is not a valid regular expression'
+                    ) from e
+                for name in test_table:
+                    if pattern.fullmatch(name.strip()):
+                        test_methods.append((name, test_table[name]))
+
+        if len(test_methods) == 0:
+            all_patterns = '" or "'.join(test_names)
+            all_tests = "\n - ".join(test_table.keys())
+            raise MoblyError(
+                f"{self.TAG} does not declare any tests matching "
+                f'"{all_patterns}". Please verify the correctness of '
+                f"{self.TAG} test names: \n - {all_tests}"
+            )
+
+        return test_methods
diff --git a/src/antlion/bin/__init__.py b/packages/antlion/bin/__init__.py
similarity index 100%
rename from src/antlion/bin/__init__.py
rename to packages/antlion/bin/__init__.py
diff --git a/packages/antlion/bin/act.py b/packages/antlion/bin/act.py
new file mode 100755
index 0000000..2726d0c
--- /dev/null
+++ b/packages/antlion/bin/act.py
@@ -0,0 +1,263 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import argparse
+import os
+import signal
+import sys
+import traceback
+
+from mobly import config_parser as mobly_config_parser
+
+from antlion import config_parser, keys, signals, test_runner, utils
+from antlion.config_parser import ActsConfigError
+
+
+def _run_test(parsed_config, test_identifiers, repeat=1):
+    """Instantiate and runs test_runner.TestRunner.
+
+    This is the function to start separate processes with.
+
+    Args:
+        parsed_config: A mobly.config_parser.TestRunConfig that is a set of
+                       configs for one test_runner.TestRunner.
+        test_identifiers: A list of tuples, each identifies what test case to
+                          run on what test class.
+        repeat: Number of times to iterate the specified tests.
+
+    Returns:
+        True if all tests passed without any error, False otherwise.
+    """
+    runner = _create_test_runner(parsed_config, test_identifiers)
+    try:
+        for i in range(repeat):
+            runner.run()
+        return runner.results.is_all_pass
+    except signals.TestAbortAll:
+        return True
+    except:
+        print(f"Exception when executing {runner.testbed_name}, iteration {i}.")
+        print(traceback.format_exc())
+    finally:
+        runner.stop()
+
+
+def _create_test_runner(parsed_config, test_identifiers):
+    """Instantiates one test_runner.TestRunner object and register termination
+    signal handlers that properly shut down the test_runner.TestRunner run.
+
+    Args:
+        parsed_config: A mobly.config_parser.TestRunConfig that is a set of
+                       configs for one test_runner.TestRunner.
+        test_identifiers: A list of tuples, each identifies what test case to
+                          run on what test class.
+
+    Returns:
+        A test_runner.TestRunner object.
+    """
+    try:
+        t = test_runner.TestRunner(parsed_config, test_identifiers)
+    except:
+        print("Failed to instantiate test runner, abort.")
+        print(traceback.format_exc())
+        sys.exit(1)
+    # Register handler for termination signals.
+    handler = config_parser.gen_term_signal_handler([t])
+    signal.signal(signal.SIGTERM, handler)
+    signal.signal(signal.SIGINT, handler)
+    return t
+
+
+def _run_tests(parsed_configs, test_identifiers, repeat):
+    """Executes requested tests sequentially.
+
+    Requested test runs will commence one after another according to the order
+    of their corresponding configs.
+
+    Args:
+        parsed_configs: A list of mobly.config_parser.TestRunConfig, each is a
+                        set of configs for one test_runner.TestRunner.
+        test_identifiers: A list of tuples, each identifies what test case to
+                          run on what test class.
+        repeat: Number of times to iterate the specified tests.
+
+    Returns:
+        True if all test runs executed successfully, False otherwise.
+    """
+    ok = True
+    for c in parsed_configs:
+        try:
+            ret = _run_test(c, test_identifiers, repeat)
+            ok = ok and ret
+        except Exception as e:
+            print(f"Exception occurred when executing test bed {c.testbed_name}. {e}")
+    return ok
+
+
+def main():
+    """This is the default implementation of a cli entry point for ACTS test
+    execution.
+
+    Or you could implement your own cli entry point using acts.config_parser
+    functions and acts.test_runner.execute_one_test_class.
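+
+    Example invocation (the config, testbed, and test class names are
+    illustrative):
+
+        act.py -c config.yaml -tb fuchsia_testbed -tc WlanScanTest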
+    """
+    parser = argparse.ArgumentParser(
+        description=(
+            "Specify tests to run. If nothing specified, " "run all test cases found."
+        )
+    )
+    parser.add_argument(
+        "-c",
+        "--config",
+        type=str,
+        required=True,
+        metavar="<PATH>",
+        help="Path to the test configuration file.",
+    )
+    parser.add_argument(
+        "-ci",
+        "--campaign_iterations",
+        metavar="<CAMPAIGN_ITERATIONS>",
+        nargs="?",
+        type=int,
+        const=1,
+        default=1,
+        help="Number of times to run the campaign or a group of test cases.",
+    )
+    parser.add_argument(
+        "-tb",
+        "--testbed",
+        nargs="+",
+        type=str,
+        metavar="[<TEST BED NAME1> <TEST BED NAME2> ...]",
+        help="Specify which test beds to run tests on.",
+    )
+    parser.add_argument(
+        "-lp",
+        "--logpath",
+        type=str,
+        metavar="<PATH>",
+        help="Root path under which all logs will be placed.",
+    )
+    parser.add_argument(
+        "-tp",
+        "--testpaths",
+        nargs="*",
+        type=str,
+        metavar="<PATH> <PATH>",
+        help="One or more non-recursive test class search paths.",
+    )
+
+    group = parser.add_mutually_exclusive_group(required=True)
+    group.add_argument(
+        "-tc",
+        "--testclass",
+        nargs="+",
+        type=str,
+        metavar="[TestClass1 TestClass2:test_xxx ...]",
+        help="A list of test classes/cases to run.",
+    )
+    group.add_argument(
+        "-tf",
+        "--testfile",
+        nargs=1,
+        type=str,
+        metavar="<PATH>",
+        help=(
+            "Path to a file containing a comma delimited list of test "
+            "classes to run."
+        ),
+    )
+    parser.add_argument(
+        "-ti",
+        "--test_case_iterations",
+        metavar="<TEST_CASE_ITERATIONS>",
+        nargs="?",
+        type=int,
+        help="Number of times to run every test case.",
+    )
+
+    args = parser.parse_args(sys.argv[1:])
+    test_list = None
+    if args.testfile:
+        test_list = config_parser.parse_test_file(args.testfile[0])
+    elif args.testclass:
+        test_list = args.testclass
+
+    config = args.config
+
+    if config.endswith(".json"):
+        print(
+            "DEPRECATION NOTICE: Converting ACTS JSON to Mobly YAML. ACTS is "
+            + "deprecated. Support will be removed in the next release."
+        )
+        config = utils.acts_json_to_mobly_yaml(config)
+        print(f"Wrote YAML config to {config}")
+
+    parsed_configs = mobly_config_parser.load_test_config_file(config, args.testbed)
+
+    for test_run_config in parsed_configs:
+        if args.testpaths:
+            tp_key = keys.Config.key_test_paths.value
+            test_run_config.controller_configs[tp_key] = args.testpaths
+        if args.logpath:
+            test_run_config.log_path = args.logpath
+        if args.test_case_iterations:
+            ti_key = keys.Config.key_test_case_iterations.value
+            test_run_config.user_params[ti_key] = args.test_case_iterations
+
+        # Sets the --testpaths flag to the default test directory if left unset.
+        testpath_key = keys.Config.key_test_paths.value
+        if (
+            testpath_key not in test_run_config.controller_configs
+            or test_run_config.controller_configs[testpath_key] is None
+        ):
+            test_run_config.controller_configs[testpath_key] = [
+                os.path.join(os.path.dirname(__file__), "../tests/"),
+            ]
+
+        test_run_config.controller_configs[testpath_key] = [
+            utils.abs_path(path)
+            for path in test_run_config.controller_configs[testpath_key]
+        ]
+
+        # TODO(markdr): Find a way to merge this with the validation done in
+        # Mobly's load_test_config_file.
+        if not test_run_config.log_path:
+            raise ActsConfigError(
+                f"Required key {keys.Config.key_log_path.value} missing in test config."
+            )
+        test_run_config.log_path = utils.abs_path(test_run_config.log_path)
+
+    # Prepare args for test runs
+    test_identifiers = config_parser.parse_test_list(test_list)
+
+    print(
+        "\n\nDEPRECATION NOTICE: Running antlion tests with act.py is "
+        "deprecated and will be removed in the next release. Please migrate "
+        "by using Mobly YAML configs and executing the test class directly:\n\n"
+    )
+    for test_class, _ in test_identifiers:
+        print(f"   python {test_class}.py -c {config}")
+    print("\n")
+
+    exec_result = _run_tests(parsed_configs, test_identifiers, args.campaign_iterations)
+    if exec_result is False:
+        # return 1 upon test failure.
+        sys.exit(1)
+    sys.exit(0)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/src/antlion/controllers/ap_lib/third_party_ap_profiles/__init__.py b/packages/antlion/capabilities/__init__.py
similarity index 100%
copy from src/antlion/controllers/ap_lib/third_party_ap_profiles/__init__.py
copy to packages/antlion/capabilities/__init__.py
diff --git a/packages/antlion/capabilities/ssh.py b/packages/antlion/capabilities/ssh.py
new file mode 100644
index 0000000..ca94d73
--- /dev/null
+++ b/packages/antlion/capabilities/ssh.py
@@ -0,0 +1,445 @@
+#!/usr/bin/env python3
+#
+# Copyright 2023 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import shutil
+import subprocess
+import time
+from dataclasses import dataclass
+from typing import Any, BinaryIO, Mapping
+
+from antlion import logger, signals
+from antlion.net import wait_for_port
+
+DEFAULT_SSH_PORT: int = 22
+DEFAULT_SSH_TIMEOUT_SEC: int = 60
+DEFAULT_SSH_CONNECT_TIMEOUT_SEC: int = 90
+DEFAULT_SSH_SERVER_ALIVE_INTERVAL: int = 30
+# The default package repository for all components.
+
+
+class SSHResult:
+    """Result of an SSH command."""
+
+    def __init__(
+        self,
+        process: subprocess.CompletedProcess[bytes]
+        | subprocess.CompletedProcess[str]
+        | subprocess.CalledProcessError,
+    ) -> None:
+        if isinstance(process.stdout, bytes):
+            self._stdout_bytes = process.stdout
+        elif isinstance(process.stdout, str):
+            self._stdout = process.stdout
+        else:
+            raise TypeError(
+                "Expected process.stdout to be either bytes or str, "
+                f"got {type(process.stdout)}"
+            )
+
+        if isinstance(process.stderr, bytes):
+            self._stderr_bytes = process.stderr
+        elif isinstance(process.stderr, str):
+            self._stderr = process.stderr
+        else:
+            raise TypeError(
+                "Expected process.stderr to be either bytes or str, "
+                f"got {type(process.stderr)}"
+            )
+
+        self._exit_status = process.returncode
+
+    def __str__(self):
+        if self.exit_status == 0:
+            return self.stdout
+        return f'status {self.exit_status}, stdout: "{self.stdout}", stderr: "{self.stderr}"'
+
+    @property
+    def stdout(self) -> str:
+        if not hasattr(self, "_stdout"):
+            self._stdout = self._stdout_bytes.decode("utf-8", errors="replace")
+        return self._stdout
+
+    @property
+    def stdout_bytes(self) -> bytes:
+        if not hasattr(self, "_stdout_bytes"):
+            self._stdout_bytes = self._stdout.encode()
+        return self._stdout_bytes
+
+    @property
+    def stderr(self) -> str:
+        if not hasattr(self, "_stderr"):
+            self._stderr = self._stderr_bytes.decode("utf-8", errors="replace")
+        return self._stderr
+
+    @property
+    def exit_status(self) -> int:
+        return self._exit_status
+
+
+class SSHError(signals.TestError):
+    """A SSH command returned with a non-zero status code."""
+
+    def __init__(self, command: str, result: SSHResult):
+        super().__init__(f'SSH command "{command}" unexpectedly returned {result}')
+        self.result = result
+
+
+class SSHTimeout(signals.TestError):
+    """A SSH command timed out."""
+
+    def __init__(self, err: subprocess.TimeoutExpired):
+        super().__init__(
+            f'SSH command "{err.cmd}" timed out after {err.timeout}s, '
+            f"stdout={err.stdout!r}, stderr={err.stderr!r}"
+        )
+
+
+class SSHTransportError(signals.TestError):
+    """Failure to send an SSH command."""
+
+
+@dataclass
+class SSHConfig:
+    """SSH client config."""
+
+    # SSH flags. See ssh(1) for full details.
+    user: str
+    host_name: str
+    identity_file: str
+
+    ssh_binary: str = "ssh"
+    config_file: str = "/dev/null"
+    port: int = 22
+
+    # SSH options. See ssh_config(5) for full details.
+    connect_timeout: int = DEFAULT_SSH_CONNECT_TIMEOUT_SEC
+    server_alive_interval: int = DEFAULT_SSH_SERVER_ALIVE_INTERVAL
+    strict_host_key_checking: bool = False
+    user_known_hosts_file: str = "/dev/null"
+    log_level: str = "ERROR"
+
+    def full_command(self, command: str, force_tty: bool = False) -> list[str]:
+        """Generate the complete command to execute command over SSH.
+
+        Args:
+            command: The command to run over SSH
+            force_tty: Force pseudo-terminal allocation. This can be used to
+                execute arbitrary screen-based programs on a remote machine,
+                which can be very useful, e.g. when implementing menu services.
+
+        Returns:
+            Arguments composing the complete call to SSH.
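+
+        Example (illustrative; the exact option list depends on this config's
+        fields and their defaults):
+
+            SSHConfig(
+                user="fuchsia",
+                host_name="192.168.42.11",
+                identity_file="~/.ssh/key",
+            ).full_command("ls")
+            # -> ["ssh", "-i", "~/.ssh/key", "-F", "/dev/null", "-p", "22",
+            #     "-o", "ConnectTimeout=90", ..., "fuchsia@192.168.42.11", "ls"]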
+        """
+        optional_flags = []
+        if force_tty:
+            # Multiple -t options force tty allocation, even if ssh has no local
+            # tty. This is necessary for launching ssh with subprocess without
+            # shell=True.
+            optional_flags.append("-tt")
+
+        return (
+            [
+                self.ssh_binary,
+                # SSH flags
+                "-i",
+                self.identity_file,
+                "-F",
+                self.config_file,
+                "-p",
+                str(self.port),
+                # SSH configuration options
+                "-o",
+                f"ConnectTimeout={self.connect_timeout}",
+                "-o",
+                f"ServerAliveInterval={self.server_alive_interval}",
+                "-o",
+                f'StrictHostKeyChecking={"yes" if self.strict_host_key_checking else "no"}',
+                "-o",
+                f"UserKnownHostsFile={self.user_known_hosts_file}",
+                "-o",
+                f"LogLevel={self.log_level}",
+            ]
+            + optional_flags
+            + [f"{self.user}@{self.host_name}"]
+            + command.split()
+        )
+
+    @staticmethod
+    def from_config(config: Mapping[str, Any]) -> "SSHConfig":
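+        """Create an SSHConfig from a controller config mapping.
+
+        Only "user", "host", and "identity_file" are required; the remaining
+        keys fall back to their defaults. Example (values are hypothetical):
+
+            SSHConfig.from_config({
+                "user": "fuchsia",
+                "host": "192.168.42.11",
+                "identity_file": "~/.ssh/key",
+            })
+        """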
+        ssh_binary_path = config.get("ssh_binary_path", None)
+        if ssh_binary_path is None:
+            ssh_binary_path = shutil.which("ssh")
+        if type(ssh_binary_path) != str:
+            raise ValueError(f"ssh_binary_path must be a string, got {ssh_binary_path}")
+
+        user = config.get("user", None)
+        if type(user) != str:
+            raise ValueError(f"user must be a string, got {user}")
+
+        host = config.get("host", None)
+        if type(host) != str:
+            raise ValueError(f"host must be a string, got {host}")
+
+        port = config.get("port", 22)
+        if type(port) != int:
+            raise ValueError(f"port must be an integer, got {port}")
+
+        identity_file = config.get("identity_file", None)
+        if type(identity_file) != str:
+            raise ValueError(f"identity_file must be a string, got {identity_file}")
+
+        ssh_config = config.get("ssh_config", "/dev/null")
+        if type(ssh_config) != str:
+            raise ValueError(f"ssh_config must be a string, got {ssh_config}")
+
+        connect_timeout = config.get("connect_timeout", 30)
+        if type(connect_timeout) != int:
+            raise ValueError(
+                f"connect_timeout must be an integer, got {connect_timeout}"
+            )
+
+        return SSHConfig(
+            user=user,
+            host_name=host,
+            identity_file=identity_file,
+            ssh_binary=ssh_binary_path,
+            config_file=ssh_config,
+            port=port,
+            connect_timeout=connect_timeout,
+        )
+
+
+class SSHProvider:
+    """Device-specific provider for SSH clients."""
+
+    def __init__(self, config: SSHConfig) -> None:
+        """
+        Args:
+            config: SSH client config
+        """
+        logger_tag = f"ssh | {config.host_name}"
+        if config.port != DEFAULT_SSH_PORT:
+            logger_tag += f":{config.port}"
+
+        # Check if the private key exists
+
+        self.log = logger.create_tagged_trace_logger(logger_tag)
+        self.config = config
+
+        try:
+            self.wait_until_reachable()
+            self.log.info("sshd is reachable")
+        except Exception as e:
+            raise TimeoutError("sshd is unreachable") from e
+
+    def wait_until_reachable(self) -> None:
+        """Wait for the device to become reachable via SSH.
+
+        Raises:
+            TimeoutError: connect_timeout has expired without a successful SSH
+                connection to the device
+            SSHTransportError: SSH is available on the device but
+                connect_timeout has expired and SSH fails to run
+            SSHTimeout: SSH is available on the device but connect_timeout has
+                expired and SSH takes too long to run a command
+        """
+        timeout_sec = self.config.connect_timeout
+        timeout = time.time() + timeout_sec
+        wait_for_port(self.config.host_name, self.config.port, timeout_sec=timeout_sec)
+
+        while True:
+            try:
+                self._run("echo", timeout_sec, False, None)
+                return
+            except SSHTransportError as e:
+                # Repeat if necessary; _run() can exit prematurely by receiving
+                # SSH transport errors. These errors can be caused by sshd not
+                # being fully initialized yet.
+                if time.time() < timeout:
+                    continue
+                else:
+                    raise e
+
+    def wait_until_unreachable(
+        self, interval_sec: int = 1, timeout_sec: int = DEFAULT_SSH_CONNECT_TIMEOUT_SEC
+    ) -> None:
+        """Wait for the device to become unreachable via SSH.
+
+        Args:
+            interval_sec: Seconds to wait between unreachability attempts
+            timeout_sec: Seconds to wait until raising TimeoutError
+
+        Raises:
+            TimeoutError: when timeout_sec has expired and the device is still
+                reachable via SSH
+        """
+        timeout = time.time() + timeout_sec
+
+        while True:
+            try:
+                wait_for_port(
+                    self.config.host_name, self.config.port, timeout_sec=interval_sec
+                )
+            except TimeoutError:
+                return
+
+            if time.time() > timeout:
+                raise TimeoutError(
+                    f"Connection to {self.config.host_name} is still reachable "
+                    f"after {timeout_sec}s"
+                )
+
+    def run(
+        self,
+        command: str,
+        timeout_sec: int = DEFAULT_SSH_TIMEOUT_SEC,
+        connect_retries: int = 3,
+        force_tty: bool = False,
+    ) -> SSHResult:
+        """Run a command on the device then exit.
+
+        Args:
+            command: String to send to the device.
+            timeout_sec: Seconds to wait for the command to complete.
+            connect_retries: Amount of times to retry connect on fail.
+            force_tty: Force pseudo-terminal allocation.
+
+        Raises:
+            SSHError: if the SSH command returns a non-zero status code
+            SSHTransportError: if SSH fails to run the command
+            SSHTimeout: if there is no response within timeout_sec
+
+        Returns:
+            SSHResult from the executed command.
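+
+        Example (illustrative; assumes `ssh` is an SSHProvider instance):
+
+            result = ssh.run("echo hello")
+            assert result.stdout.strip() == "hello"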
+        """
+        return self._run_with_retry(
+            command, timeout_sec, connect_retries, force_tty, stdin=None
+        )
+
+    def _run_with_retry(
+        self,
+        command: str,
+        timeout_sec: int,
+        connect_retries: int,
+        force_tty: bool,
+        stdin: BinaryIO | None,
+    ) -> SSHResult:
+        err: Exception = ValueError("connect_retries cannot be 0")
+        for i in range(0, connect_retries):
+            try:
+                return self._run(command, timeout_sec, force_tty, stdin)
+            except SSHTransportError as e:
+                err = e
+                self.log.warn(f"Connect failed: {e}")
+        raise err
+
+    def _run(
+        self, command: str, timeout_sec: int, force_tty: bool, stdin: BinaryIO | None
+    ) -> SSHResult:
+        full_command = self.config.full_command(command, force_tty)
+        self.log.debug(
+            f'Running "{command}" (full command: "{" ".join(full_command)}")'
+        )
+        try:
+            process = subprocess.run(
+                full_command,
+                capture_output=True,
+                timeout=timeout_sec,
+                check=True,
+                stdin=stdin,
+            )
+        except subprocess.CalledProcessError as e:
+            if e.returncode == 255:
+                stderr = e.stderr.decode("utf-8", errors="replace")
+                if (
+                    "Name or service not known" in stderr
+                    or "Host does not exist" in stderr
+                ):
+                    raise SSHTransportError(
+                        f"Hostname {self.config.host_name} cannot be resolved to an address"
+                    ) from e
+                if "Connection timed out" in stderr:
+                    raise SSHTransportError(
+                        f"Failed to establish a connection to {self.config.host_name} within {timeout_sec}s"
+                    ) from e
+                if "Connection refused" in stderr:
+                    raise SSHTransportError(
+                        f"Connection refused by {self.config.host_name}"
+                    ) from e
+
+            raise SSHError(command, SSHResult(e)) from e
+        except subprocess.TimeoutExpired as e:
+            raise SSHTimeout(e) from e
+
+        return SSHResult(process)
+
+    def upload_file(
+        self,
+        local_path: str,
+        remote_path: str,
+        timeout_sec: int = DEFAULT_SSH_TIMEOUT_SEC,
+        connect_retries: int = 3,
+    ) -> None:
+        """Upload a file to the device.
+
+        Args:
+            local_path: Path to the file to upload
+            remote_path: Path on the remote device to place the uploaded file.
+            timeout_sec: Seconds to wait for the command to complete.
+            connect_retries: Amount of times to retry connect on fail.
+
+        Raises:
+            SSHError: if the SSH upload returns a non-zero status code
+            SSHTransportError: if SSH fails to run the upload command
+            SSHTimeout: if there is no response within timeout_sec
+        """
+        with open(local_path, "rb") as file:
+            self._run_with_retry(
+                f"cat > {remote_path}",
+                timeout_sec,
+                connect_retries,
+                force_tty=False,
+                stdin=file,
+            )
+
+    def download_file(
+        self,
+        remote_path: str,
+        local_path: str,
+        timeout_sec: int = DEFAULT_SSH_TIMEOUT_SEC,
+        connect_retries: int = 3,
+    ) -> None:
+        """Upload a file to the device.
+
+        Args:
+            remote_path: Path on the remote device to download.
+            local_path: Path on the host to the place the downloaded file.
+            timeout_sec: Seconds to wait for the command to complete.
+            connect_retries: Amount of times to retry connect on fail.
+
+        Raises:
+            SSHError: if the SSH command returns a non-zero status code
+            SSHTransportError: if SSH fails to run the command
+            SSHTimeout: if there is no response within timeout_sec
+        """
+        with open(local_path, "rb") as file:
+            self._run_with_retry(
+                f"cat > {remote_path}",
+                timeout_sec,
+                connect_retries,
+                force_tty=False,
+                stdin=file,
+            )
diff --git a/packages/antlion/config_parser.py b/packages/antlion/config_parser.py
new file mode 100755
index 0000000..cf8dc90
--- /dev/null
+++ b/packages/antlion/config_parser.py
@@ -0,0 +1,249 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import itertools
+import os
+import sys
+
+import mobly.config_parser as mobly_config_parser
+
+from antlion import keys, utils
+
+# An environment variable defining the base location for ACTS logs.
+_ENV_ACTS_LOGPATH = "ACTS_LOGPATH"
+# An environment variable that enables test case failures to log stack traces.
+_ENV_TEST_FAILURE_TRACEBACKS = "ACTS_TEST_FAILURE_TRACEBACKS"
+# An environment variable defining the test search paths for ACTS.
+_ENV_ACTS_TESTPATHS = "ACTS_TESTPATHS"
+_PATH_SEPARATOR = ":"
+
+
+class ActsConfigError(Exception):
+    """Raised when there is a problem in test configuration file."""
+
+
+def _validate_test_config(test_config):
+    """Validates the raw configuration loaded from the config file.
+
+    Making sure all the required fields exist.
+    """
+    for k in keys.Config.reserved_keys.value:
+        # TODO(markdr): Remove this continue after merging this with the
+        # validation done in Mobly's load_test_config_file.
+        if k == keys.Config.key_test_paths.value or k == keys.Config.key_log_path.value:
+            continue
+
+        if k not in test_config:
+            raise ActsConfigError(f"Required key {k} missing in test config.")
+
+
+def _validate_testbed_name(name):
+    """Validates the name of a test bed.
+
+    Since test bed names are used as part of the test run id, they need to
+    meet certain requirements.
+
+    Args:
+        name: The test bed's name specified in config file.
+
+    Raises:
+        ActsConfigError: If the name does not meet the criteria.
+    """
+    if not name:
+        raise ActsConfigError("Test bed names can't be empty.")
+    if not isinstance(name, str):
+        raise ActsConfigError("Test bed names have to be string.")
+    for l in name:
+        if l not in utils.valid_filename_chars:
+            raise ActsConfigError(f"Char '{l}' is not allowed in test bed names.")
+
+
+def _validate_testbed_configs(testbed_configs):
+    """Validates the testbed configurations.
+
+    Args:
+        testbed_configs: A list of testbed configuration json objects.
+
+    Raises:
+        ActsConfigError: If any part of the configuration is invalid.
+    """
+    # Cross checks testbed configs for resource conflicts.
+    for name in testbed_configs:
+        _validate_testbed_name(name)
+
+
+def gen_term_signal_handler(test_runners):
+    def termination_sig_handler(signal_num, frame):
+        print(f"Received sigterm {signal_num}.")
+        for t in test_runners:
+            t.stop()
+        sys.exit(1)
+
+    return termination_sig_handler
+
+
+def _parse_one_test_specifier(item):
+    """Parse one test specifier from command line input.
+
+    Args:
+        item: A string that specifies a test class or test cases in one test
+            class to run.
+
+    Returns:
+        A tuple of a string and a list of strings. The string is the test class
+        name, the list of strings is a list of test case names. The list can be
+        None.
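+
+    Examples (class and test names are illustrative):
+
+        "WlanScanTest:test_a,test_b" -> ("WlanScanTest", ["test_a", "test_b"])
+        "WlanScanTest"               -> ("WlanScanTest", None)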
+    """
+    tokens = item.split(":")
+    if len(tokens) > 2:
+        raise ActsConfigError(f"Syntax error in test specifier {item}")
+    if len(tokens) == 1:
+        # This should be considered a test class name
+        test_cls_name = tokens[0]
+        return test_cls_name, None
+    elif len(tokens) == 2:
+        # This should be considered a test class name followed by
+        # a list of test case names.
+        test_cls_name, test_case_names = tokens
+        clean_names = [elem.strip() for elem in test_case_names.split(",")]
+        return test_cls_name, clean_names
+
+
+def parse_test_list(test_list):
+    """Parse user provided test list into internal format for test_runner.
+
+    Args:
+        test_list: A list of test classes/cases.
+
+    Returns:
+        A list of tuples, each identifying a test class and an optional list
+        of test case names.
+    """
+    result = []
+    for elem in test_list:
+        result.append(_parse_one_test_specifier(elem))
+    return result
+
+
+def load_test_config_file(test_config_path, tb_filters=None):
+    """Processes the test configuration file provided by the user.
+
+    Loads the configuration file into a json object, unpacks each testbed
+    config into its own TestRunConfig object, and validates the configuration
+    in the process.
+
+    Args:
+        test_config_path: Path to the test configuration file.
+        tb_filters: A subset of test bed names to be pulled from the config
+                    file. If None, then all test beds will be selected.
+
+    Returns:
+        A list of mobly.config_parser.TestRunConfig objects to be passed to
+        test_runner.TestRunner.
+    """
+    configs = utils.load_config(test_config_path)
+
+    testbeds = configs[keys.Config.key_testbed.value]
+    if type(testbeds) is list:
+        tb_dict = dict()
+        for testbed in testbeds:
+            tb_dict[testbed[keys.Config.key_testbed_name.value]] = testbed
+        testbeds = tb_dict
+    elif type(testbeds) is dict:
+        # For compatibility, make sure the entry name is the same as
+        # the testbed's "name" entry
+        for name, testbed in testbeds.items():
+            testbed[keys.Config.key_testbed_name.value] = name
+
+    if tb_filters:
+        tbs = {}
+        for name in tb_filters:
+            if name in testbeds:
+                tbs[name] = testbeds[name]
+            else:
+                raise ActsConfigError(
+                    'Expected testbed named "%s", but none was found. Check '
+                    "if you have the correct testbed names." % name
+                )
+        testbeds = tbs
+
+    if (
+        keys.Config.key_log_path.value not in configs
+        and _ENV_ACTS_LOGPATH in os.environ
+    ):
+        print(f"Using environment log path: {os.environ[_ENV_ACTS_LOGPATH]}")
+        configs[keys.Config.key_log_path.value] = os.environ[_ENV_ACTS_LOGPATH]
+    if (
+        keys.Config.key_test_paths.value not in configs
+        and _ENV_ACTS_TESTPATHS in os.environ
+    ):
+        print(f"Using environment test paths: {os.environ[_ENV_ACTS_TESTPATHS]}")
+        configs[keys.Config.key_test_paths.value] = os.environ[
+            _ENV_ACTS_TESTPATHS
+        ].split(_PATH_SEPARATOR)
+    if (
+        keys.Config.key_test_failure_tracebacks.value not in configs
+        and _ENV_TEST_FAILURE_TRACEBACKS in os.environ
+    ):
+        configs[keys.Config.key_test_failure_tracebacks.value] = os.environ[
+            _ENV_TEST_FAILURE_TRACEBACKS
+        ]
+
+    # TODO: See if there is a better way to do this: b/29836695
+    config_path, _ = os.path.split(utils.abs_path(test_config_path))
+    configs[keys.Config.key_config_path.value] = config_path
+    _validate_test_config(configs)
+    _validate_testbed_configs(testbeds)
+    # Unpack testbeds into separate json objects.
+    configs.pop(keys.Config.key_testbed.value)
+    test_run_configs = []
+
+    for _, testbed in testbeds.items():
+        test_run_config = mobly_config_parser.TestRunConfig()
+        test_run_config.testbed_name = testbed[keys.Config.key_testbed_name.value]
+        test_run_config.controller_configs = testbed
+        test_run_config.controller_configs[
+            keys.Config.key_test_paths.value
+        ] = configs.get(keys.Config.key_test_paths.value, None)
+        test_run_config.log_path = configs.get(keys.Config.key_log_path.value, None)
+        if test_run_config.log_path is not None:
+            test_run_config.log_path = utils.abs_path(test_run_config.log_path)
+
+        user_param_pairs = []
+        for item in itertools.chain(configs.items(), testbed.items()):
+            if item[0] not in keys.Config.reserved_keys.value:
+                user_param_pairs.append(item)
+        test_run_config.user_params = dict(user_param_pairs)
+
+        test_run_configs.append(test_run_config)
+    return test_run_configs
+
+
+def parse_test_file(fpath):
+    """Parses a test file that contains test specifiers.
+
+    Args:
+        fpath: A string that is the path to the test file to parse.
+
+    Returns:
+        A list of strings, each is a test specifier.
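+
+    Lines ending in ":" or "," are joined with the following line, so a single
+    specifier may span multiple lines. Example file contents (class and test
+    names are illustrative):
+
+        WlanScanTest:test_a,
+        test_b
+        WlanPhyComplianceTest
+
+    parses to ["WlanScanTest:test_a,test_b", "WlanPhyComplianceTest"].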
+    """
+    with open(fpath, "r") as f:
+        tf = []
+        for line in f:
+            line = line.strip()
+            if not line:
+                continue
+            if len(tf) and (tf[-1].endswith(":") or tf[-1].endswith(",")):
+                tf[-1] += line
+            else:
+                tf.append(line)
+        return tf
diff --git a/packages/antlion/context.py b/packages/antlion/context.py
new file mode 100644
index 0000000..61a6b92
--- /dev/null
+++ b/packages/antlion/context.py
@@ -0,0 +1,357 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import enum
+import logging
+import os
+
+from antlion.event import event_bus
+from antlion.event.event import (
+    Event,
+    TestCaseBeginEvent,
+    TestCaseEndEvent,
+    TestCaseEvent,
+    TestClassBeginEvent,
+    TestClassEndEvent,
+    TestClassEvent,
+)
+
+
+class ContextLevel(enum.IntEnum):
+    ROOT = 0
+    TESTCLASS = 1
+    TESTCASE = 2
+
+
+def get_current_context(depth=None):
+    """Get the current test context at the specified depth.
+    Pulls the most recently created context, with a level at or below the given
+    depth, from the _contexts stack.
+
+    Args:
+        depth: The desired context level. For example, the TESTCLASS level would
+            yield the current test class context, even if the test is currently
+            within a test case.
+
+    Returns: An instance of TestContext.
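+
+    Example (illustrative):
+
+        test_class_ctx = get_current_context(ContextLevel.TESTCLASS)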
+    """
+    if depth is None:
+        return _contexts[-1]
+    return _contexts[min(depth, len(_contexts) - 1)]
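+
+# Illustrative: during a test case the _contexts stack is
+# [RootContext, TestClassContext, TestCaseContext], so get_current_context()
+# returns the TestCaseContext, while
+# get_current_context(ContextLevel.TESTCLASS) returns the enclosing
+# TestClassContext.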
+
+
+def get_context_for_event(event):
+    """Creates and returns a TestContext from the given event.
+    A TestClassContext is created for a TestClassEvent, and a TestCaseContext
+    is created for a TestCaseEvent.
+
+    Args:
+        event: An instance of TestCaseEvent or TestClassEvent.
+
+    Returns: An instance of TestContext corresponding to the event.
+
+    Raises: TypeError if event is neither a TestCaseEvent nor TestClassEvent
+    """
+    if isinstance(event, TestCaseEvent):
+        return _get_context_for_test_case_event(event)
+    if isinstance(event, TestClassEvent):
+        return _get_context_for_test_class_event(event)
+    raise TypeError(f"Unrecognized event type: {event} {event.__class__}")
+
+
+def _get_context_for_test_case_event(event):
+    """Generate a TestCaseContext from the given TestCaseEvent."""
+    return TestCaseContext(event.test_class, event.test_case)
+
+
+def _get_context_for_test_class_event(event):
+    """Generate a TestClassContext from the given TestClassEvent."""
+    return TestClassContext(event.test_class)
+
+
+class NewContextEvent(Event):
+    """The event posted when a test context has changed."""
+
+
+class NewTestClassContextEvent(NewContextEvent):
+    """The event posted when the test class context has changed."""
+
+
+class NewTestCaseContextEvent(NewContextEvent):
+    """The event posted when the test case context has changed."""
+
+
+def _update_test_class_context(event):
+    """Pushes a new TestClassContext to the _contexts stack upon a
+    TestClassBeginEvent. Pops the most recent context off the stack upon a
+    TestClassEndEvent. Posts the context change to the event bus.
+
+    Args:
+        event: An instance of TestClassBeginEvent or TestClassEndEvent.
+    """
+    if isinstance(event, TestClassBeginEvent):
+        _contexts.append(_get_context_for_test_class_event(event))
+    if isinstance(event, TestClassEndEvent):
+        if _contexts:
+            _contexts.pop()
+    event_bus.post(NewTestClassContextEvent())
+
+
+def _update_test_case_context(event):
+    """Pushes a new TestCaseContext to the _contexts stack upon a
+    TestCaseBeginEvent. Pops the most recent context off the stack upon a
+    TestCaseEndEvent. Posts the context change to the event bus.
+
+    Args:
+        event: An instance of TestCaseBeginEvent or TestCaseEndEvent.
+    """
+    if isinstance(event, TestCaseBeginEvent):
+        _contexts.append(_get_context_for_test_case_event(event))
+    if isinstance(event, TestCaseEndEvent):
+        if _contexts:
+            _contexts.pop()
+    event_bus.post(NewTestCaseContextEvent())
+
+
+event_bus.register(TestClassEvent, _update_test_class_context)
+event_bus.register(TestCaseBeginEvent, _update_test_case_context, order=-100)
+event_bus.register(TestCaseEndEvent, _update_test_case_context, order=100)
+
+
+class TestContext(object):
+    """An object representing the current context in which a test is executing.
+
+    The context encodes the current state of the test runner with respect to a
+    particular scenario in which code is being executed. For example, if some
+    code is being executed as part of a test case, then the context should
+    encode information about that test case such as its name or enclosing
+    class.
+
+    The subcontext specifies a relative path in which certain outputs,
+    e.g. logcat, should be kept for the given context.
+
+    The full output path is given by
+    <base_output_path>/<context_dir>/<subcontext>.
+
+    Attributes:
+        _base_output_paths: a dictionary mapping a logger's name to its base
+                            output path
+        _subcontexts: a dictionary mapping a logger's name to its
+                      subcontext-level output directory
+    """
+
+    _base_output_paths = {}
+    _subcontexts = {}
+
+    def get_base_output_path(self, log_name=None):
+        """Gets the base output path for this logger.
+
+        The base output path is interpreted as the reporting root for the
+        entire test runner.
+
+        If a path has been added with add_base_output_path, it is returned.
+        Otherwise, a default is determined by _get_default_base_output_path().
+
+        Args:
+            log_name: The name of the logger.
+
+        Returns:
+            The output path.
+        """
+        if log_name in self._base_output_paths:
+            return self._base_output_paths[log_name]
+        return self._get_default_base_output_path()
+
+    @classmethod
+    def add_base_output_path(cls, log_name, base_output_path):
+        """Store the base path for this logger.
+
+        Args:
+            log_name: The name of the logger.
+            base_output_path: The base path of output files for this logger.
+        """
+        cls._base_output_paths[log_name] = base_output_path
+
+    def get_subcontext(self, log_name=None):
+        """Gets the subcontext for this logger.
+
+        The subcontext is interpreted as the directory, relative to the
+        context-level path, where all outputs of the given logger are stored.
+
+        If a path has been added with add_subcontext, it is returned.
+        Otherwise, the empty string is returned.
+
+        Args:
+            log_name: The name of the logger.
+
+        Returns:
+            The output path.
+        """
+        return self._subcontexts.get(log_name, "")
+
+    @classmethod
+    def add_subcontext(cls, log_name, subcontext):
+        """Store the subcontext path for this logger.
+
+        Args:
+            log_name: The name of the logger.
+            subcontext: The relative subcontext path of output files for this
+                        logger.
+        """
+        cls._subcontexts[log_name] = subcontext
+
+    def get_full_output_path(self, log_name=None):
+        """Gets the full output path for this context.
+
+        The full path represents the absolute path to the output directory,
+        as given by <base_output_path>/<context_dir>/<subcontext>
+
+        Args:
+            log_name: The name of the logger. Used to specify the base output
+                      path and the subcontext.
+
+        Returns:
+            The output path.
+        """
+
+        path = os.path.join(
+            self.get_base_output_path(log_name),
+            self._get_default_context_dir(),
+            self.get_subcontext(log_name),
+        )
+        os.makedirs(path, exist_ok=True)
+        return path
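+
+    # Illustrative sketch (the logger name "logcat" and the paths are
+    # hypothetical):
+    #
+    #   TestContext.add_base_output_path("logcat", "/tmp/logs")
+    #   TestContext.add_subcontext("logcat", "raw")
+    #   path = get_current_context().get_full_output_path("logcat")
+    #   # During a test case: /tmp/logs/<TestClass>/<test_case>/raw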
+
+    @property
+    def identifier(self):
+        raise NotImplementedError()
+
+    def _get_default_base_output_path(self):
+        """Gets the default base output path.
+
+        This will attempt to use the ACTS logging path set up in the global
+        logger.
+
+        Returns:
+            The logging path.
+
+        Raises:
+            EnvironmentError: If the ACTS logger has not been initialized.
+        """
+        try:
+            return logging.log_path
+        except AttributeError as e:
+            raise EnvironmentError(
+                "The ACTS logger has not been set up and"
+                ' "base_output_path" has not been set.'
+            ) from e
+
+    def _get_default_context_dir(self):
+        """Gets the default output directory for this context."""
+        raise NotImplementedError()
+
+
+class RootContext(TestContext):
+    """A TestContext that represents a test run."""
+
+    @property
+    def identifier(self):
+        return "root"
+
+    def _get_default_context_dir(self):
+        """Gets the default output directory for this context.
+
+        Logs at the root level context are placed directly in the base level
+        directory, so no context-level path exists."""
+        return ""
+
+
+class TestClassContext(TestContext):
+    """A TestContext that represents a test class.
+
+    Attributes:
+        test_class: The test class instance that this context represents.
+    """
+
+    def __init__(self, test_class):
+        """Initializes a TestClassContext for the given test class.
+
+        Args:
+            test_class: A test class object. Must be an instance of the test
+                        class, not the class object itself.
+        """
+        self.test_class = test_class
+
+    @property
+    def test_class_name(self):
+        return self.test_class.__class__.__name__
+
+    @property
+    def identifier(self):
+        return self.test_class_name
+
+    def _get_default_context_dir(self):
+        """Gets the default output directory for this context.
+
+        For TestClassContexts, this will be the name of the test class. This is
+        in line with the ACTS logger itself.
+        """
+        return self.test_class_name
+
+
+class TestCaseContext(TestContext):
+    """A TestContext that represents a test case.
+
+    Attributes:
+        test_case: The string name of the test case.
+        test_class: The test class instance enclosing the test case.
+    """
+
+    def __init__(self, test_class, test_case):
+        """Initializes a TestCaseContext for the given test case.
+
+        Args:
+            test_class: A test class object. Must be an instance of the test
+                        class, not the class object itself.
+            test_case: The string name of the test case.
+        """
+        self.test_class = test_class
+        self.test_case = test_case
+
+    @property
+    def test_case_name(self):
+        return self.test_case
+
+    @property
+    def test_class_name(self):
+        return self.test_class.__class__.__name__
+
+    @property
+    def identifier(self):
+        return f"{self.test_class_name}.{self.test_case_name}"
+
+    def _get_default_context_dir(self):
+        """Gets the default output directory for this context.
+
+        For TestCaseContexts, this will be the name of the test class followed
+        by the name of the test case. This is in line with the ACTS logger
+        itself.
+        """
+        return os.path.join(self.test_class_name, self.test_case_name)
+
+
+# stack for keeping track of the current test context
+_contexts = [RootContext()]
diff --git a/src/antlion/controllers/OWNERS b/packages/antlion/controllers/OWNERS
similarity index 100%
rename from src/antlion/controllers/OWNERS
rename to packages/antlion/controllers/OWNERS
diff --git a/src/antlion/controllers/fuchsia_lib/lib_controllers/__init__.py b/packages/antlion/controllers/__init__.py
similarity index 100%
copy from src/antlion/controllers/fuchsia_lib/lib_controllers/__init__.py
copy to packages/antlion/controllers/__init__.py
diff --git a/packages/antlion/controllers/access_point.py b/packages/antlion/controllers/access_point.py
new file mode 100755
index 0000000..9a1f227
--- /dev/null
+++ b/packages/antlion/controllers/access_point.py
@@ -0,0 +1,890 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import ipaddress
+import time
+from dataclasses import dataclass
+from typing import Any, FrozenSet, Mapping
+
+from antlion import logger, utils
+from antlion.capabilities.ssh import SSHConfig, SSHProvider
+from antlion.controllers.ap_lib import hostapd_constants
+from antlion.controllers.ap_lib.ap_get_interface import ApInterfaces
+from antlion.controllers.ap_lib.ap_iwconfig import ApIwconfig
+from antlion.controllers.ap_lib.bridge_interface import BridgeInterface
+from antlion.controllers.ap_lib.dhcp_config import DhcpConfig, Subnet
+from antlion.controllers.ap_lib.dhcp_server import DhcpServer, NoInterfaceError
+from antlion.controllers.ap_lib.extended_capabilities import ExtendedCapabilities
+from antlion.controllers.ap_lib.hostapd import Hostapd
+from antlion.controllers.ap_lib.hostapd_ap_preset import create_ap_preset
+from antlion.controllers.ap_lib.hostapd_config import HostapdConfig
+from antlion.controllers.ap_lib.hostapd_security import Security
+from antlion.controllers.ap_lib.radvd import Radvd
+from antlion.controllers.ap_lib.radvd_config import RadvdConfig
+from antlion.controllers.ap_lib.wireless_network_management import (
+    BssTransitionManagementRequest,
+)
+from antlion.controllers.pdu import PduDevice, get_pdu_port_for_device
+from antlion.controllers.utils_lib.commands import ip, route
+from antlion.controllers.utils_lib.ssh import connection, settings
+from antlion.libs.proc import job
+
+MOBLY_CONTROLLER_CONFIG_NAME = "AccessPoint"
+ACTS_CONTROLLER_REFERENCE_NAME = "access_points"
+
+
+class Error(Exception):
+    """Error raised when there is a problem with the access point."""
+
+
+@dataclass
+class _ApInstance:
+    hostapd: Hostapd
+    subnet: Subnet
+
+
+# These ranges are split this way because each physical radio can have up
+# to 8 SSIDs: the 2.4GHz radio uses the DHCP ranges 192.168.1.0/24 through
+# 192.168.8.0/24, and the 5GHz radio uses 192.168.9.0/24 through
+# 192.168.16.0/24.
+_AP_2GHZ_SUBNET_STR_DEFAULT = "192.168.1.0/24"
+_AP_5GHZ_SUBNET_STR_DEFAULT = "192.168.9.0/24"
+
+# The last digit of the ip for the bridge interface
+BRIDGE_IP_LAST = "100"
+
+
+class AccessPoint(object):
+    """An access point controller.
+
+    Attributes:
+        ssh: The ssh connection to this ap.
+        ssh_settings: The ssh settings being used by the ssh connection.
+        dhcp_settings: The dhcp server settings being used.
+    """
+
+    def __init__(self, configs: Mapping[str, Any]) -> None:
+        """
+        Args:
+            configs: configs for the access point from config file.
+        """
+        self.ssh_settings = settings.from_config(configs["ssh_config"])
+        self.log = logger.create_logger(
+            lambda msg: f"[Access Point|{self.ssh_settings.hostname}] {msg}"
+        )
+        self.device_pdu_config = configs.get("PduDevice", None)
+        self.identifier = self.ssh_settings.hostname
+
+        if "ap_subnet" in configs:
+            self._AP_2G_SUBNET_STR: str = configs["ap_subnet"]["2g"]
+            self._AP_5G_SUBNET_STR: str = configs["ap_subnet"]["5g"]
+        else:
+            self._AP_2G_SUBNET_STR = _AP_2GHZ_SUBNET_STR_DEFAULT
+            self._AP_5G_SUBNET_STR = _AP_5GHZ_SUBNET_STR_DEFAULT
+
+        self._AP_2G_SUBNET = Subnet(ipaddress.IPv4Network(self._AP_2G_SUBNET_STR))
+        self._AP_5G_SUBNET = Subnet(ipaddress.IPv4Network(self._AP_5G_SUBNET_STR))
+
+        self.ssh = connection.SshConnection(self.ssh_settings)
+
+        # TODO(http://b/278758876): Replace self.ssh with self.ssh_provider
+        self.ssh_provider = SSHProvider(
+            SSHConfig(
+                self.ssh_settings.username,
+                self.ssh_settings.hostname,
+                self.ssh_settings.identity_file,
+                port=self.ssh_settings.port,
+                ssh_binary=self.ssh_settings.executable,
+                connect_timeout=90,
+            )
+        )
+
+        # Singleton utilities for running various commands.
+        self._ip_cmd = ip.LinuxIpCommand(self.ssh)
+        self._route_cmd = route.LinuxRouteCommand(self.ssh)
+
+        # A map from network interface name to _ApInstance objects representing
+        # the hostapd instance running against the interface.
+        self._aps: dict[str, _ApInstance] = dict()
+        self._dhcp: DhcpServer | None = None
+        self._dhcp_bss: dict[Any, Subnet] = dict()
+        self._radvd: Radvd | None = None
+        self.bridge = BridgeInterface(self)
+        self.iwconfig = ApIwconfig(self)
+
+        # Check to see if wan_interface is specified in acts_config for tests
+        # isolated from the internet and set this override.
+        self.interfaces = ApInterfaces(self, configs.get("wan_interface"))
+
+        # Get needed interface names and initialize the unnecessary ones.
+        self.wan = self.interfaces.get_wan_interface()
+        self.wlan = self.interfaces.get_wlan_interface()
+        self.wlan_2g = self.wlan[0]
+        self.wlan_5g = self.wlan[1]
+        self.lan = self.interfaces.get_lan_interface()
+        self._initial_ap()
+        self.setup_bridge = False
+
+    def _initial_ap(self) -> None:
+        """Initial AP interfaces.
+
+        Bring down hostapd if instance is running, bring down all bridge
+        interfaces.
+        """
+        # This is necessary for Gale/Whirlwind flashed with dev channel image
+        # Unused interfaces such as existing hostapd daemon, guest, mesh
+        # interfaces need to be brought down as part of the AP initialization
+        # process, otherwise test would fail.
+        try:
+            self.ssh.run("stop wpasupplicant")
+        except job.Error:
+            self.log.info("No wpasupplicant running")
+        try:
+            self.ssh.run("stop hostapd")
+        except job.Error:
+            self.log.info("No hostapd running")
+        # Bring down all wireless interfaces
+        for iface in self.wlan:
+            WLAN_DOWN = f"ip link set {iface} down"
+            self.ssh.run(WLAN_DOWN)
+        # Bring down all bridge interfaces
+        bridge_interfaces = self.interfaces.get_bridge_interface()
+        for iface in bridge_interfaces:
+            BRIDGE_DOWN = f"ip link set {iface} down"
+            BRIDGE_DEL = f"brctl delbr {iface}"
+            self.ssh.run(BRIDGE_DOWN)
+            self.ssh.run(BRIDGE_DEL)
+
+    def start_ap(
+        self,
+        hostapd_config: HostapdConfig,
+        radvd_config: RadvdConfig | None = None,
+        setup_bridge: bool = False,
+        is_nat_enabled: bool = True,
+        additional_parameters: dict[str, Any] | None = None,
+    ) -> list[str]:
+        """Starts as an ap using a set of configurations.
+
+        This will start an ap on this host. To start an ap the controller
+        selects a network interface to use based on the configs given. It then
+        will start up hostapd on that interface. Next, a subnet is created for
+        the network interface and the DHCP server is refreshed to give out IPs
+        for that subnet to any device that connects through that interface.
+
+        Args:
+            hostapd_config: The configurations to use when starting up the ap.
+            radvd_config: The IPv6 configuration to use when starting up the ap.
+            setup_bridge: Whether to bridge the LAN interface with the WLAN
+                interface. Only one WLAN interface can be bridged with the
+                LAN interface, and none of the guest networks can be bridged.
+            is_nat_enabled: If True, start NAT on the AP to allow the DUT to be
+                able to access the internet if the WAN port is connected to the
+                internet.
+            additional_parameters: Parameters that are sent directly into the
+                hostapd config file. This can be used for debugging and/or
+                adding one-off parameters into the config.
+
+        Returns:
+            An identifier for each ssid being started. These identifiers can be
+            used later by this controller to control the ap.
+
+        Raises:
+            Error: When the ap can't be brought up.
+        """
+        if additional_parameters is None:
+            additional_parameters = {}
+
+        if hostapd_config.frequency < 5000:
+            interface = self.wlan_2g
+            subnet = self._AP_2G_SUBNET
+        else:
+            interface = self.wlan_5g
+            subnet = self._AP_5G_SUBNET
+
+        # radvd requires the interface to have an IPv6 link-local address.
+        if radvd_config:
+            self.ssh.run(f"sysctl -w net.ipv6.conf.{interface}.disable_ipv6=0")
+            self.ssh.run(f"sysctl -w net.ipv6.conf.{interface}.forwarding=1")
+
+        # To support running a DHCP server on any interface, initialization is
+        # deferred until the WLAN interface is known rather than happening in
+        # __init__.
+        self._dhcp = DhcpServer(self.ssh, interface=interface)
+
+        # For multi-BSSID configurations, the MAC address of the wireless
+        # interface needs enough space to mask out up to 8 different MAC
+        # addresses: one interface uses the range hex 0-7 and the other uses
+        # hex 8-f.
+        interface_mac_orig = None
+        cmd = f"ip link show {interface}|grep ether|awk -F' ' '{{print $2}}'"
+        interface_mac_orig = self.ssh.run(cmd)
+        if interface == self.wlan_5g:
+            hostapd_config.bssid = f"{interface_mac_orig.stdout[:-1]}0"
+            last_octet = 1
+        if interface == self.wlan_2g:
+            hostapd_config.bssid = f"{interface_mac_orig.stdout[:-1]}8"
+            last_octet = 9
+        if interface in self._aps:
+            raise ValueError(
+                "No WiFi interface available for AP on "
+                f"channel {hostapd_config.channel}"
+            )
+
+        apd = Hostapd(self.ssh, interface)
+        new_instance = _ApInstance(hostapd=apd, subnet=subnet)
+        self._aps[interface] = new_instance
+
+        # Turn off the DHCP server, we're going to change its settings.
+        self.stop_dhcp()
+        # Clear all routes to prevent old routes from interfering.
+        self._route_cmd.clear_routes(net_interface=interface)
+        # Add IPv6 link-local route so packets destined to the AP will be
+        # processed by the AP. This is necessary if an iperf server is running
+        # on the AP, but not for traffic handled by the Linux networking stack
+        # such as ping.
+        if radvd_config:
+            self._route_cmd.add_route(interface, "fe80::/64")
+
+        self._dhcp_bss = dict()
+        if hostapd_config.bss_lookup:
+            # The self._dhcp_bss dictionary maps each BSS interface name to
+            # the IP scope that will be used for that interface. The a, b, c,
+            # d variables below are the octets of the IP address, and the
+            # third octet is incremented for each requested interface. This
+            # block only brings up the hostapd interfaces; the DHCP servers
+            # for each interface are configured later.
+            counter = 1
+            for bss in hostapd_config.bss_lookup:
+                if interface_mac_orig:
+                    hostapd_config.bss_lookup[bss].bssid = (
+                        interface_mac_orig.stdout[:-1] + hex(last_octet)[-1:]
+                    )
+                self._route_cmd.clear_routes(net_interface=str(bss))
+                if interface is self.wlan_2g:
+                    starting_ip_range = self._AP_2G_SUBNET_STR
+                else:
+                    starting_ip_range = self._AP_5G_SUBNET_STR
+                a, b, c, d = starting_ip_range.split(".")
+                self._dhcp_bss[bss] = Subnet(
+                    ipaddress.IPv4Network(f"{a}.{b}.{int(c) + counter}.{d}")
+                )
+                counter = counter + 1
+                last_octet = last_octet + 1
+
+        apd.start(hostapd_config, additional_parameters=additional_parameters)
+
+        # The DHCP server requires interfaces to have IPs and routes before
+        # it will come up.
+        interface_ip = ipaddress.ip_interface(
+            f"{subnet.router}/{subnet.network.netmask}"
+        )
+        if setup_bridge is True:
+            bridge_interface_name = "eth_test"
+            interfaces = [interface]
+            if self.lan:
+                interfaces.append(self.lan)
+            self.create_bridge(bridge_interface_name, interfaces)
+            self._ip_cmd.set_ipv4_address(bridge_interface_name, interface_ip)
+        else:
+            self._ip_cmd.set_ipv4_address(interface, interface_ip)
+        if hostapd_config.bss_lookup:
+            # Assign the DHCP scopes that were defined, but not yet used,
+            # during the hostapd loop above to each interface that was set up
+            # for hostapd.
+            for bss_name, bss_subnet in self._dhcp_bss.items():
+                bss_interface_ip = ipaddress.ip_interface(
+                    f"{bss_subnet.router}/{bss_subnet.network.netmask}"
+                )
+                self._ip_cmd.set_ipv4_address(str(bss_name), bss_interface_ip)
+
+        # Restart the DHCP server with our updated list of subnets.
+        configured_subnets = self.get_configured_subnets()
+        dhcp_conf = DhcpConfig(subnets=configured_subnets)
+        self.start_dhcp(dhcp_conf=dhcp_conf)
+        if is_nat_enabled:
+            self.start_nat()
+            self.enable_forwarding()
+        else:
+            self.stop_nat()
+            self.enable_forwarding()
+        if radvd_config:
+            radvd_interface = bridge_interface_name if setup_bridge else interface
+            self._radvd = Radvd(self.ssh, radvd_interface)
+            self._radvd.start(radvd_config)
+        else:
+            self._radvd = None
+
+        bss_interfaces = list(hostapd_config.bss_lookup)
+        bss_interfaces.append(interface)
+
+        return bss_interfaces
+
+    def get_configured_subnets(self) -> list[Subnet]:
+        """Get the list of configured subnets on the access point.
+
+        This allows consumers of the access point objects to create custom
+        DHCP configs with the correct subnets.
+
+        Returns: a list of Subnet objects
+        """
+        configured_subnets = [x.subnet for x in self._aps.values()]
+        configured_subnets.extend(self._dhcp_bss.values())
+        return configured_subnets
+
+    def start_dhcp(self, dhcp_conf: DhcpConfig) -> None:
+        """Start a DHCP server for the specified subnets.
+
+        This allows consumers of the access point objects to control DHCP.
+
+        Args:
+            dhcp_conf: A DhcpConfig object.
+
+        Raises:
+            Error: Raised when a dhcp server error is found.
+        """
+        if self._dhcp is not None:
+            self._dhcp.start(config=dhcp_conf)
+
+    def stop_dhcp(self) -> None:
+        """Stop DHCP for this AP object.
+
+        This allows consumers of the access point objects to control DHCP.
+        """
+        if self._dhcp is not None:
+            self._dhcp.stop()
+
+    def get_dhcp_logs(self) -> str | None:
+        """Get DHCP logs for this AP object.
+
+        This allows consumers of the access point objects to validate DHCP
+        behavior.
+
+        Returns:
+            A string of the DHCP server logs, or None if a DHCP server has not
+            been started.
+        """
+        if self._dhcp is not None:
+            return self._dhcp.get_logs()
+        return None
+
+    def get_hostapd_logs(self) -> dict[str, str]:
+        """Get hostapd logs for all interfaces on AP object.
+
+        This allows consumers of the access point objects to validate hostapd
+        behavior.
+
+        Returns: A dict with {interface: log} from hostapd instances.
+        """
+        hostapd_logs: dict[str, str] = dict()
+        for identifier in self._aps:
+            hostapd_logs[identifier] = self._aps[identifier].hostapd.pull_logs()
+        return hostapd_logs
+
+    def get_radvd_logs(self) -> str | None:
+        """Get radvd logs for this AP object.
+
+        This allows consumers of the access point objects to validate radvd
+        behavior.
+
+        Returns:
+            A string of the radvd logs, or None if a radvd server has not been
+            started.
+        """
+        if self._radvd:
+            return self._radvd.pull_logs()
+        return None
+
+    def enable_forwarding(self) -> None:
+        """Enable IPv4 and IPv6 forwarding on the AP.
+
+        When forwarding is enabled, the access point is able to route IP
+        packets between its network interfaces.
+        """
+        self.ssh.run("echo 1 > /proc/sys/net/ipv4/ip_forward")
+        self.ssh.run("echo 1 > /proc/sys/net/ipv6/conf/all/forwarding")
+
+    def start_nat(self) -> None:
+        """Start NAT on the AP.
+
+        This allows consumers of the access point objects to enable NAT
+        on the AP.
+
+        Note that this is currently a global setting, since we don't
+        have per-interface masquerade rules.
+        """
+        # The following commands are needed to enable NAT between
+        # the WAN and LAN/WLAN ports.  This means anyone connecting to the
+        # WLAN/LAN ports will be able to access the internet if the WAN port
+        # is connected to the internet.
+        self.ssh.run("iptables -t nat -F")
+        self.ssh.run(f"iptables -t nat -A POSTROUTING -o {self.wan} -j MASQUERADE")
+
+    def stop_nat(self) -> None:
+        """Stop NAT on the AP.
+
+        This allows consumers of the access point objects to disable NAT on the
+        AP.
+
+        Note that this is currently a global setting, since we don't have
+        per-interface masquerade rules.
+        """
+        self.ssh.run("iptables -t nat -F")
+
+    def create_bridge(self, bridge_name: str, interfaces: list[str]) -> None:
+        """Create the specified bridge and bridge the specified interfaces.
+
+        Args:
+            bridge_name: The name of the bridge to create.
+            interfaces: A list of interfaces to add to the bridge.
+        """
+
+        # Create the bridge interface
+        self.ssh.run(f"brctl addbr {bridge_name}")
+
+        for interface in interfaces:
+            self.ssh.run(f"brctl addif {bridge_name} {interface}")
+
+        self.ssh.run(f"ip link set {bridge_name} up")
+
+    def remove_bridge(self, bridge_name: str) -> None:
+        """Removes the specified bridge
+
+        Args:
+            bridge_name: The name of the bridge to remove.
+        """
+        # Check if the bridge exists.
+        #
+        # Cases where it may not exist: we failed to initialize properly, or
+        # we're running both 2.4GHz and 5GHz SSIDs and have already torn down
+        # the bridge once but got called again for the other band.
+        result = self.ssh.run(f"brctl show {bridge_name}", ignore_status=True)
+
+        # If the bridge exists, we'll get a return code of 0, indicating
+        # success, so we can continue and remove the bridge.
+        if result.returncode == 0:
+            self.ssh.run(f"ip link set {bridge_name} down")
+            self.ssh.run(f"brctl delbr {bridge_name}")
+
+    def get_bssid_from_ssid(self, ssid: str, band: str) -> str | None:
+        """Gets the BSSID from a provided SSID
+
+        Args:
+            ssid: An SSID string.
+            band: 2G or 5G Wi-Fi band.
+        Returns: The BSSID if found on the AP, or None if the SSID could not
+            be found.
+        """
+        if band == hostapd_constants.BAND_2G:
+            interfaces = [self.wlan_2g, ssid]
+        else:
+            interfaces = [self.wlan_5g, ssid]
+
+        # Get the interface name associated with the given ssid.
+        for interface in interfaces:
+            iw_output = self.ssh.run(
+                f"iw dev {interface} info|grep ssid|awk -F' ' '{{print $2}}'"
+            )
+            if "command failed: No such device" in iw_output.stderr:
+                continue
+            else:
+                # If the configured ssid is equal to the given ssid, we found
+                # the right interface.
+                if iw_output.stdout == ssid:
+                    iw_output = self.ssh.run(
+                        f"iw dev {interface} info|grep addr|awk -F' ' '{{print $2}}'"
+                    )
+                    return iw_output.stdout
+        return None
+
+    def stop_ap(self, identifier: str) -> None:
+        """Stops a running ap on this controller.
+
+        Args:
+            identifier: The identifier of the ap that should be taken down.
+        """
+
+        instance = self._aps.get(identifier)
+        if instance is None:
+            raise ValueError(f"Invalid identifier {identifier} given")
+
+        if self._radvd:
+            self._radvd.stop()
+        try:
+            self.stop_dhcp()
+        except NoInterfaceError:
+            pass
+        self.stop_nat()
+        instance.hostapd.stop()
+        self._ip_cmd.clear_ipv4_addresses(identifier)
+
+        del self._aps[identifier]
+        bridge_interfaces = self.interfaces.get_bridge_interface()
+        for iface in bridge_interfaces:
+            BRIDGE_DOWN = f"ip link set {iface} down"
+            BRIDGE_DEL = f"brctl delbr {iface}"
+            self.ssh.run(BRIDGE_DOWN)
+            self.ssh.run(BRIDGE_DEL)
+
+    def stop_all_aps(self) -> None:
+        """Stops all running aps on this device."""
+
+        for ap in list(self._aps.keys()):
+            self.stop_ap(ap)
+
+    def close(self) -> None:
+        """Called to take down the entire access point.
+
+        When called will stop all aps running on this host, shutdown the dhcp
+        server, and stop the ssh connection.
+        """
+
+        if self._aps:
+            self.stop_all_aps()
+        self.ssh.close()
+
+    def generate_bridge_configs(self, channel: int) -> tuple[str, str | None, str]:
+        """Generate a list of configs for a bridge between LAN and WLAN.
+
+        Args:
+            channel: the channel WLAN interface is brought up on
+            iface_lan: the LAN interface to bridge
+        Returns:
+            configs: tuple containing iface_wlan, iface_lan and bridge_ip
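+
+        Example (illustrative): for channel 6 with the default subnets, this
+        returns (self.wlan_2g, self.lan, "192.168.1.100").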
+        """
+
+        if channel < 15:
+            iface_wlan = self.wlan_2g
+            subnet_str = self._AP_2G_SUBNET_STR
+        else:
+            iface_wlan = self.wlan_5g
+            subnet_str = self._AP_5G_SUBNET_STR
+
+        iface_lan = self.lan
+
+        a, b, c, _ = subnet_str.split("/")[0].split(".")
+        bridge_ip = f"{a}.{b}.{c}.{BRIDGE_IP_LAST}"
+
+        return (iface_wlan, iface_lan, bridge_ip)
+
+    def ping(
+        self,
+        dest_ip: str,
+        count: int = 3,
+        interval: int = 1000,
+        timeout: int = 1000,
+        size: int = 56,
+        additional_ping_params: str = "",
+    ) -> utils.PingResult:
+        """Pings from AP to dest_ip, returns dict of ping stats (see utils.ping)"""
+        return utils.ping(
+            self.ssh,
+            dest_ip,
+            count=count,
+            interval=interval,
+            timeout=timeout,
+            size=size,
+            additional_ping_params=additional_ping_params,
+        )
+
+    def can_ping(
+        self,
+        dest_ip: str,
+        count: int = 1,
+        interval: int = 1000,
+        timeout: int = 1000,
+        size: int = 56,
+        additional_ping_params: str = "",
+    ) -> bool:
+        """Returns whether ap can ping dest_ip (see utils.can_ping)"""
+        return utils.can_ping(
+            self.ssh,
+            dest_ip,
+            count=count,
+            interval=interval,
+            timeout=timeout,
+            size=size,
+            additional_ping_params=additional_ping_params,
+        )
+
+    def hard_power_cycle(
+        self,
+        pdus: list[PduDevice],
+        hostapd_configs: list[HostapdConfig] | None = None,
+    ) -> None:
+        """Kills, then restores power to AccessPoint, verifying it goes down and
+        comes back online cleanly.
+
+        Args:
+            pdus: PDUs in the testbed
+            hostapd_configs: Hostapd settings. If present, these networks will
+                be spun up after the AP has rebooted. This list can either
+                contain HostapdConfig objects, or dictionaries with the start_ap
+                params
+                    (i.e  { 'hostapd_config': <HostapdConfig>,
+                            'setup_bridge': <bool>,
+                            'additional_parameters': <dict> } ).
+        Raises:
+            Error: If no PduDevice is provided in the AccessPoint config.
+            ConnectionError: If the AccessPoint fails to go offline or come back.
+        """
+        if not self.device_pdu_config:
+            raise Error("No PduDevice provided in AccessPoint config.")
+
+        if hostapd_configs is None:
+            hostapd_configs = []
+
+        self.log.info(f"Power cycling")
+        ap_pdu, ap_pdu_port = get_pdu_port_for_device(self.device_pdu_config, pdus)
+
+        self.log.info(f"Killing power")
+        ap_pdu.off(ap_pdu_port)
+
+        self.log.info("Verifying AccessPoint is unreachable.")
+        self.ssh_provider.wait_until_unreachable()
+        self.log.info("AccessPoint is unreachable as expected.")
+
+        self._aps.clear()
+
+        self.log.info(f"Restoring power")
+        ap_pdu.on(ap_pdu_port)
+
+        self.log.info("Waiting for AccessPoint to become available via SSH.")
+        self.ssh_provider.wait_until_reachable()
+        self.log.info("AccessPoint responded to SSH.")
+
+        # Allow 5 seconds for OS to finish getting set up
+        time.sleep(5)
+        self._initial_ap()
+        self.log.info("Power cycled successfully")
+
+        for ap_settings in hostapd_configs:
+            if isinstance(ap_settings, HostapdConfig):
+                config = ap_settings
+                setup_bridge = False
+                additional_parameters: dict[str, Any] = {}
+
+            elif isinstance(ap_settings, dict):
+                config = ap_settings["hostapd_config"]
+                setup_bridge = ap_settings.get("setup_bridge", False)
+                additional_parameters = ap_settings.get("additional_parameters", {})
+            else:
+                raise TypeError(
+                    "Items in hostapd_configs list must either be "
+                    "HostapdConfig objects or dictionaries."
+                )
+
+            self.log.info(f"Restarting network {config.ssid}")
+            self.start_ap(
+                config,
+                setup_bridge=setup_bridge,
+                additional_parameters=additional_parameters,
+            )
+
+    def channel_switch(self, identifier: str, channel_num: int) -> None:
+        """Switch to a different channel on the given AP."""
+        instance = self._aps.get(identifier)
+        if instance is None:
+            raise ValueError(f"Invalid identifier {identifier} given")
+        self.log.info(f"channel switch to channel {channel_num}")
+        instance.hostapd.channel_switch(channel_num)
+
+    def get_current_channel(self, identifier: str) -> int:
+        """Find the current channel on the given AP."""
+        instance = self._aps.get(identifier)
+        if instance is None:
+            raise ValueError(f"Invalid identifier {identifier} given")
+        return instance.hostapd.get_current_channel()
+
+    def get_stas(self, identifier: str) -> set[str]:
+        """Return MAC addresses of all associated STAs on the given AP."""
+        instance = self._aps.get(identifier)
+        if instance is None:
+            raise ValueError(f"Invalid identifier {identifier} given")
+        return instance.hostapd.get_stas()
+
+    def sta_authenticated(self, identifier: str, sta_mac: str) -> bool:
+        """Is STA authenticated?"""
+        instance = self._aps.get(identifier)
+        if instance is None:
+            raise ValueError(f"Invalid identifier {identifier} given")
+        return instance.hostapd.sta_authenticated(sta_mac)
+
+    def sta_associated(self, identifier: str, sta_mac: str) -> bool:
+        """Is STA associated?"""
+        instance = self._aps.get(identifier)
+        if instance is None:
+            raise ValueError(f"Invalid identifier {identifier} given")
+        return instance.hostapd.sta_associated(sta_mac)
+
+    def sta_authorized(self, identifier: str, sta_mac: str) -> bool:
+        """Is STA authorized (802.1X controlled port open)?"""
+        instance = self._aps.get(identifier)
+        if instance is None:
+            raise ValueError(f"Invalid identifier {identifier} given")
+        return instance.hostapd.sta_authorized(sta_mac)
+
+    def get_sta_extended_capabilities(
+        self, identifier: str, sta_mac: str
+    ) -> ExtendedCapabilities:
+        """Get extended capabilities for the given STA, as seen by the AP."""
+        instance = self._aps.get(identifier)
+        if instance is None:
+            raise ValueError(f"Invalid identifier {identifier} given")
+        return instance.hostapd.get_sta_extended_capabilities(sta_mac)
+
+    def send_bss_transition_management_req(
+        self, identifier: str, sta_mac: str, request: BssTransitionManagementRequest
+    ) -> None:
+        """Send a BSS Transition Management request to an associated STA."""
+        instance = self._aps.get(identifier)
+        if instance is None:
+            raise ValueError(f"Invalid identifier {identifier} given")
+        instance.hostapd.send_bss_transition_management_req(sta_mac, request)
+
+
+def setup_ap(
+    access_point: AccessPoint,
+    profile_name: str,
+    channel: int,
+    ssid: str,
+    mode: str | None = None,
+    preamble: bool | None = None,
+    beacon_interval: int | None = None,
+    dtim_period: int | None = None,
+    frag_threshold: int | None = None,
+    rts_threshold: int | None = None,
+    force_wmm: bool | None = None,
+    hidden: bool | None = False,
+    security: Security | None = None,
+    pmf_support: int | None = None,
+    additional_ap_parameters: dict[str, Any] | None = None,
+    password: str | None = None,
+    n_capabilities: list[Any] | None = None,
+    ac_capabilities: list[Any] | None = None,
+    vht_bandwidth: int | None = None,
+    wnm_features: FrozenSet[hostapd_constants.WnmFeature] = frozenset(),
+    setup_bridge: bool = False,
+    is_ipv6_enabled: bool = False,
+    is_nat_enabled: bool = True,
+) -> list[str]:
+    """Creates a hostapd profile and runs it on an ap. This is a convenience
+    function that allows us to start an ap with a single function, without first
+    creating a hostapd config.
+
+    Args:
+        access_point: An ACTS access_point controller
+        profile_name: The profile name of one of the hostapd ap presets.
+        channel: What channel to set the AP to.
+        preamble: Whether to set short or long preamble
+        beacon_interval: The beacon interval
+        dtim_period: Length of dtim period
+        frag_threshold: Fragmentation threshold
+        rts_threshold: RTS threshold
+        force_wmm: Enable WMM or not
+        hidden: Whether to hide the SSID by not advertising it in beacons.
+        security: What security to enable.
+        pmf_support: Whether pmf is not disabled, enabled, or required
+        additional_ap_parameters: Additional parameters to send the AP.
+        password: Password to connect to WLAN if necessary.
+        wnm_features: WNM features to enable on the AP.
+        setup_bridge: Whether to bridge the LAN interface with the WLAN
+            interface. Only one WLAN interface can be bridged with the LAN
+            interface, and none of the guest networks can be bridged.
+        is_ipv6_enabled: If True, start an IPv6 router advertisement daemon
+        is_nat_enabled: If True, start NAT on the AP to allow the DUT to be able
+            to access the internet if the WAN port is connected to the internet.
+
+    Returns:
+        An identifier for each ssid being started. These identifiers can be
+        used later by this controller to control the ap.
+
+    Raises:
+        Error: When the ap can't be brought up.
+    """
+    if additional_ap_parameters is None:
+        additional_ap_parameters = {}
+
+    ap = create_ap_preset(
+        profile_name=profile_name,
+        iface_wlan_2g=access_point.wlan_2g,
+        iface_wlan_5g=access_point.wlan_5g,
+        channel=channel,
+        ssid=ssid,
+        mode=mode,
+        short_preamble=preamble,
+        beacon_interval=beacon_interval,
+        dtim_period=dtim_period,
+        frag_threshold=frag_threshold,
+        rts_threshold=rts_threshold,
+        force_wmm=force_wmm,
+        hidden=hidden,
+        bss_settings=[],
+        security=security,
+        pmf_support=pmf_support,
+        n_capabilities=n_capabilities,
+        ac_capabilities=ac_capabilities,
+        vht_bandwidth=vht_bandwidth,
+        wnm_features=wnm_features,
+    )
+    return access_point.start_ap(
+        hostapd_config=ap,
+        radvd_config=RadvdConfig() if is_ipv6_enabled else None,
+        setup_bridge=setup_bridge,
+        is_nat_enabled=is_nat_enabled,
+        additional_parameters=additional_ap_parameters,
+    )
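+
+
+# Illustrative sketch (not a prescribed usage): a test could bring up an open
+# 5GHz network on the first configured AccessPoint roughly like this. The
+# profile name and SSID below are hypothetical examples.
+#
+#   identifiers = setup_ap(
+#       access_point=access_points[0],
+#       profile_name="whirlwind",
+#       channel=36,  # a 5GHz channel
+#       ssid="example_ssid",
+#   )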
+
+
+def create(configs: Any) -> list[AccessPoint]:
+    """Creates ap controllers from a json config.
+
+    Creates an ap controller from either a list, or a single
+    element. The element can either be just the hostname or a dictionary
+    containing the hostname and username of the ap to connect to over ssh.
+
+    Args:
+        configs: The json configs that represent this controller.
+
+    Returns:
+        A list of AccessPoint objects, one for each config.
+    """
+    return [AccessPoint(c) for c in configs]
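+
+
+# Illustrative sketch of an "AccessPoint" entry in a testbed config, based on
+# the keys read by AccessPoint.__init__. The ssh_config contents are defined by
+# antlion.controllers.utils_lib.ssh.settings.from_config and are elided here;
+# all values shown are hypothetical.
+#
+#   "AccessPoint": [
+#       {
+#           "ssh_config": {...},
+#           "ap_subnet": {"2g": "192.168.1.0/24", "5g": "192.168.9.0/24"},
+#           "wan_interface": "eth0"
+#       }
+#   ]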
+
+
+def destroy(aps: list[AccessPoint]) -> None:
+    """Destroys a list of access points.
+
+    Args:
+        aps: The list of access points to destroy.
+    """
+    for ap in aps:
+        ap.close()
+
+
+def get_info(aps: list[AccessPoint]) -> list[str]:
+    """Get information on a list of access points.
+
+    Args:
+        aps: A list of AccessPoints.
+
+    Returns:
+        A list of each AP's hostname.
+    """
+    return [ap.ssh_settings.hostname for ap in aps]
diff --git a/packages/antlion/controllers/adb.py b/packages/antlion/controllers/adb.py
new file mode 100644
index 0000000..61597ff
--- /dev/null
+++ b/packages/antlion/controllers/adb.py
@@ -0,0 +1,294 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+import re
+import shlex
+import shutil
+
+from antlion.controllers.adb_lib.error import AdbCommandError, AdbError
+from antlion.libs.proc import job
+
+DEFAULT_ADB_TIMEOUT = 60
+DEFAULT_ADB_PULL_TIMEOUT = 180
+
+ADB_REGEX = re.compile("adb:")
+# Uses a regex to be backwards compatible with previous versions of ADB
+# (N and above add the serial to the error msg).
+DEVICE_NOT_FOUND_REGEX = re.compile("error: device (?:'.*?' )?not found")
+DEVICE_OFFLINE_REGEX = re.compile("error: device offline")
+# Raised when adb forward commands fail to forward a port.
+CANNOT_BIND_LISTENER_REGEX = re.compile("error: cannot bind listener:")
+# Expected output is "Android Debug Bridge version 1.0.XX".
+ADB_VERSION_REGEX = re.compile(r"Android Debug Bridge version 1.0.(\d+)")
+GREP_REGEX = re.compile(r"grep(\s+)")
+
+ROOT_USER_ID = "0"
+SHELL_USER_ID = "2000"
+
+
+def parsing_parcel_output(output):
+    """Parsing the adb output in Parcel format.
+
+    Parsing the adb output in format:
+      Result: Parcel(
+        0x00000000: 00000000 00000014 00390038 00340031 '........8.9.1.4.'
+        0x00000010: 00300038 00300030 00300030 00340032 '8.0.0.0.0.0.2.4.'
+        0x00000020: 00350034 00330035 00320038 00310033 '4.5.5.3.8.2.3.1.'
+        0x00000030: 00000000                            '....            ')
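+
+      For the example above, this returns "89148000002445538231": the quoted
+      characters are joined and all '.' characters and whitespace are removed.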
+    """
+    output = "".join(re.findall(r"'(.*)'", output))
+    return re.sub(r"[.\s]", "", output)
+
+
+class AdbProxy(object):
+    """Proxy class for ADB.
+
+    For syntactic reasons, the '-' in adb commands needs to be replaced with
+    '_'. Adb commands can be executed directly on an object:
+    >> adb = AdbProxy(<serial>)
+    >> adb.start_server()
+    >> adb.devices() # will return the console output of "adb devices".
+    """
+
+    def __init__(self, serial="", ssh_connection=None):
+        """Construct an instance of AdbProxy.
+
+        Args:
+            serial: str serial number of Android device from `adb devices`
+            ssh_connection: SshConnection instance if the Android device is
+                            connected to a remote host that we can reach via SSH.
+        """
+        self.serial = serial
+        self._server_local_port = None
+        adb_path = shutil.which("adb")
+        adb_cmd = [shlex.quote(adb_path)]
+        if serial:
+            adb_cmd.append(f"-s {serial}")
+        if ssh_connection is not None:
+            # Kill all existing adb processes on the remote host (if any)
+            # Note that if there are none, then pkill exits with non-zero status
+            ssh_connection.run("pkill adb", ignore_status=True)
+            # Copy over the adb binary to a temp dir
+            temp_dir = ssh_connection.run("mktemp -d").stdout.strip()
+            ssh_connection.send_file(adb_path, temp_dir)
+            # Start up a new adb server running as root from the copied binary.
+            remote_adb_cmd = "%s/adb %s root" % (
+                temp_dir,
+                "-s %s" % serial if serial else "",
+            )
+            ssh_connection.run(remote_adb_cmd)
+            # Proxy a local port to the adb server port
+            local_port = ssh_connection.create_ssh_tunnel(5037)
+            self._server_local_port = local_port
+
+        if self._server_local_port:
+            adb_cmd.append(f"-P {local_port}")
+        self.adb_str = " ".join(adb_cmd)
+        self._ssh_connection = ssh_connection
+
+    def get_user_id(self):
+        """Returns the adb user. Either 2000 (shell) or 0 (root)."""
+        return self.shell("id -u")
+
+    def is_root(self, user_id=None):
+        """Checks if the user is root.
+
+        Args:
+            user_id: if supplied, the id to check against.
+        Returns:
+            True if the user is root. False otherwise.
+        """
+        if not user_id:
+            user_id = self.get_user_id()
+        return user_id == ROOT_USER_ID
+
+    def ensure_root(self):
+        """Ensures the user is root after making this call.
+
+        Note that this will still fail if the device is a user build, as root
+        is not accessible from a user build.
+
+        Returns:
+            False if the device is a user build. True otherwise.
+        """
+        self.ensure_user(ROOT_USER_ID)
+        return self.is_root()
+
+    def ensure_user(self, user_id=SHELL_USER_ID):
+        """Ensures the user is set to the given user.
+
+        Args:
+            user_id: The id of the user.
+        """
+        if self.is_root(user_id):
+            self.root()
+        else:
+            self.unroot()
+        self.wait_for_device()
+        return self.get_user_id() == user_id
+
+    def _exec_cmd(self, cmd, ignore_status=False, timeout=DEFAULT_ADB_TIMEOUT):
+        """Executes adb commands in a new shell.
+
+        This is specific to executing adb commands.
+
+        Args:
+            cmd: A string or list that is the adb command to execute.
+            ignore_status: If True, return the command output even when the
+                command exits with a non-zero status instead of raising.
+            timeout: Timeout for the command, in seconds.
+
+        Returns:
+            The stdout of the adb command.
+
+        Raises:
+            AdbError for errors in ADB operations.
+            AdbCommandError for errors from commands executed through ADB.
+        """
+        if isinstance(cmd, list):
+            cmd = " ".join(cmd)
+        result = job.run(cmd, ignore_status=True, timeout_sec=timeout)
+        ret, out, err = result.exit_status, result.stdout, result.stderr
+
+        if any(
+            pattern.match(err)
+            for pattern in [
+                ADB_REGEX,
+                DEVICE_OFFLINE_REGEX,
+                DEVICE_NOT_FOUND_REGEX,
+                CANNOT_BIND_LISTENER_REGEX,
+            ]
+        ):
+            raise AdbError(cmd=cmd, stdout=out, stderr=err, ret_code=ret)
+        if "Result: Parcel" in out:
+            return parsing_parcel_output(out)
+        if ignore_status or (ret == 1 and GREP_REGEX.search(cmd)):
+            return out or err
+        if ret != 0:
+            raise AdbCommandError(cmd=cmd, stdout=out, stderr=err, ret_code=ret)
+        return out
+
+    def _exec_adb_cmd(self, name, arg_str, **kwargs):
+        return self._exec_cmd(f"{self.adb_str} {name} {arg_str}", **kwargs)
+
+    def _exec_cmd_nb(self, cmd, **kwargs):
+        """Executes adb commands in a new shell, non blocking.
+
+        Args:
+            cmds: A string that is the adb command to execute.
+
+        """
+        return job.run_async(cmd, **kwargs)
+
+    def _exec_adb_cmd_nb(self, name, arg_str, **kwargs):
+        return self._exec_cmd_nb(f"{self.adb_str} {name} {arg_str}", **kwargs)
+
+    def tcp_forward(self, host_port, device_port):
+        """Starts tcp forwarding from localhost to this android device.
+
+        Args:
+            host_port: Port number to use on localhost
+            device_port: Port number to use on the android device.
+
+        Returns:
+            Forwarded port on host as int or command output string on error
+        """
+        if self._ssh_connection:
+            # We have to hop through a remote host first.
+            #  1) Find some free port on the remote host's localhost
+            #  2) Setup forwarding between that remote port and the requested
+            #     device port
+            remote_port = self._ssh_connection.find_free_port()
+            host_port = self._ssh_connection.create_ssh_tunnel(
+                remote_port, local_port=host_port
+            )
+        output = self.forward(f"tcp:{host_port} tcp:{device_port}", ignore_status=True)
+        # If host_port is 0, the output will be the port selected by adb.
+        # Otherwise, there will be no output upon successfully forwarding
+        # the hinted port.
+        if not output:
+            return host_port
+        try:
+            output_int = int(output)
+        except ValueError:
+            return output
+        return output_int
+
+    def remove_tcp_forward(self, host_port):
+        """Stop tcp forwarding a port from localhost to this android device.
+
+        Args:
+            host_port: Port number to use on localhost
+        """
+        if self._ssh_connection:
+            remote_port = self._ssh_connection.close_ssh_tunnel(host_port)
+            if remote_port is None:
+                logging.warning(
+                    "Cannot close unknown forwarded tcp port: %d", host_port
+                )
+                return
+            # The actual port we need to disable via adb is on the remote host.
+            host_port = remote_port
+        self.forward(f"--remove tcp:{host_port}")
+
+    def getprop(self, prop_name):
+        """Get a property of the device.
+
+        This is a convenience wrapper for "adb shell getprop xxx".
+
+        Args:
+            prop_name: A string that is the name of the property to get.
+
+        Returns:
+            A string that is the value of the property, or None if the property
+            doesn't exist.
+        """
+        return self.shell(f"getprop {prop_name}")
+
+    # TODO: This should be abstracted out into an object like the other shell
+    # command.
+    def shell(self, command, ignore_status=False, timeout=DEFAULT_ADB_TIMEOUT):
+        return self._exec_adb_cmd(
+            "shell", shlex.quote(command), ignore_status=ignore_status, timeout=timeout
+        )
+
+    def shell_nb(self, command):
+        return self._exec_adb_cmd_nb("shell", shlex.quote(command))
+
+    def __getattr__(self, name):
+        def adb_call(*args, **kwargs):
+            clean_name = name.replace("_", "-")
+            if clean_name in ["pull", "push", "remount"] and "timeout" not in kwargs:
+                kwargs["timeout"] = DEFAULT_ADB_PULL_TIMEOUT
+            arg_str = " ".join(str(elem) for elem in args)
+            return self._exec_adb_cmd(clean_name, arg_str, **kwargs)
+
+        return adb_call
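+
+    # Illustrative: __getattr__ forwards undefined attributes to adb, mapping
+    # '_' to '-'. For example (hypothetical serial),
+    # AdbProxy("ABC123").wait_for_device() runs "adb -s ABC123 wait-for-device".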
+
+    def get_version_number(self):
+        """Returns the version number of ADB as an int (XX in 1.0.XX).
+
+        Raises:
+            AdbError if the version number is not found/parsable.
+        """
+        version_output = self.version()
+        match = re.search(ADB_VERSION_REGEX, version_output)
+
+        if not match:
+            logging.error(
+                "Unable to capture ADB version from adb version "
+                "output: %s" % version_output
+            )
+            raise AdbError("adb version", version_output, "", "")
+        return int(match.group(1))
diff --git a/src/antlion/controllers/adb_lib/__init__.py b/packages/antlion/controllers/adb_lib/__init__.py
similarity index 100%
rename from src/antlion/controllers/adb_lib/__init__.py
rename to packages/antlion/controllers/adb_lib/__init__.py
diff --git a/packages/antlion/controllers/adb_lib/error.py b/packages/antlion/controllers/adb_lib/error.py
new file mode 100644
index 0000000..9599214
--- /dev/null
+++ b/packages/antlion/controllers/adb_lib/error.py
@@ -0,0 +1,40 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from antlion import error
+
+
+class AdbError(error.ActsError):
+    """Raised when there is an error in adb operations."""
+
+    def __init__(self, cmd, stdout, stderr, ret_code):
+        super().__init__()
+        self.cmd = cmd
+        self.stdout = stdout
+        self.stderr = stderr
+        self.ret_code = ret_code
+
+    def __str__(self):
+        return ("Error executing adb cmd '%s'. ret: %d, stdout: %s, stderr: %s") % (
+            self.cmd,
+            self.ret_code,
+            self.stdout,
+            self.stderr,
+        )
+
+
+class AdbCommandError(AdbError):
+    """Raised when there is an error in the command being run through ADB."""
diff --git a/packages/antlion/controllers/android_device.py b/packages/antlion/controllers/android_device.py
new file mode 100755
index 0000000..fd679ef
--- /dev/null
+++ b/packages/antlion/controllers/android_device.py
@@ -0,0 +1,1808 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import collections
+import logging
+import math
+import os
+import re
+import shutil
+import socket
+import time
+from datetime import datetime
+
+from antlion import context
+from antlion import logger as acts_logger
+from antlion import tracelogger, utils
+from antlion.controllers import adb, fastboot
+from antlion.controllers.adb_lib.error import AdbError
+from antlion.controllers.android_lib import errors
+from antlion.controllers.android_lib import events as android_events
+from antlion.controllers.android_lib import logcat, services
+from antlion.controllers.sl4a_lib import sl4a_manager
+from antlion.controllers.utils_lib.ssh import connection, settings
+from antlion.event import event_bus
+from antlion.libs.proc import job
+from antlion.runner import Runner
+
+MOBLY_CONTROLLER_CONFIG_NAME = "AndroidDevice"
+ACTS_CONTROLLER_REFERENCE_NAME = "android_devices"
+
+ANDROID_DEVICE_PICK_ALL_TOKEN = "*"
+# Key name for SL4A extra params in config file
+ANDROID_DEVICE_SL4A_CLIENT_PORT_KEY = "sl4a_client_port"
+ANDROID_DEVICE_SL4A_FORWARDED_PORT_KEY = "sl4a_forwarded_port"
+ANDROID_DEVICE_SL4A_SERVER_PORT_KEY = "sl4a_server_port"
+# Key name for adb logcat extra params in config file.
+ANDROID_DEVICE_ADB_LOGCAT_PARAM_KEY = "adb_logcat_param"
+ANDROID_DEVICE_EMPTY_CONFIG_MSG = "Configuration is empty, abort!"
+ANDROID_DEVICE_NOT_LIST_CONFIG_MSG = "Configuration should be a list, abort!"
+CRASH_REPORT_PATHS = (
+    "/data/tombstones/",
+    "/data/vendor/ramdump/",
+    "/data/ramdump/",
+    "/data/vendor/ssrdump",
+    "/data/vendor/ramdump/bluetooth",
+    "/data/vendor/log/cbd",
+)
+CRASH_REPORT_SKIPS = (
+    "RAMDUMP_RESERVED",
+    "RAMDUMP_STATUS",
+    "RAMDUMP_OUTPUT",
+    "bluetooth",
+)
+ALWAYS_ON_LOG_PATH = "/data/vendor/radio/logs/always-on"
+DEFAULT_QXDM_LOG_PATH = "/data/vendor/radio/diag_logs"
+DEFAULT_SDM_LOG_PATH = "/data/vendor/slog/"
+DEFAULT_SCREENSHOT_PATH = "/sdcard/Pictures/screencap"
+BUG_REPORT_TIMEOUT = 1800
+PULL_TIMEOUT = 300
+PORT_RETRY_COUNT = 3
+ADB_ROOT_RETRY_COUNT = 2
+ADB_ROOT_RETRY_INTERVAL = 10
+IPERF_TIMEOUT = 60
+SL4A_APK_NAME = "com.googlecode.android_scripting"
+WAIT_FOR_DEVICE_TIMEOUT = 180
+ENCRYPTION_WINDOW = "CryptKeeper"
+DEFAULT_DEVICE_PASSWORD = "1111"
+RELEASE_ID_REGEXES = [re.compile(r"\w+\.\d+\.\d+"), re.compile(r"N\w+")]
+
+
+def create(configs):
+    """Creates AndroidDevice controller objects.
+
+    Args:
+        configs: A list of dicts, each representing a configuration for an
+                 Android device.
+
+    Returns:
+        A list of AndroidDevice objects.
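+
+    Example (hypothetical serials, for illustration only):
+        >>> ads = create(["ABC123", "DEF456"])     # list of serials
+        >>> ads = create([{"serial": "ABC123"}])   # list of config dicts
+        >>> ads = create("*")                      # pick up all attached devices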
+    """
+    if not configs:
+        raise errors.AndroidDeviceConfigError(ANDROID_DEVICE_EMPTY_CONFIG_MSG)
+    elif configs == ANDROID_DEVICE_PICK_ALL_TOKEN:
+        ads = get_all_instances()
+    elif not isinstance(configs, list):
+        raise errors.AndroidDeviceConfigError(ANDROID_DEVICE_NOT_LIST_CONFIG_MSG)
+    elif isinstance(configs[0], str):
+        # Configs is a list of serials.
+        ads = get_instances(configs)
+    else:
+        # Configs is a list of dicts.
+        ads = get_instances_with_configs(configs)
+
+    ads[0].log.info(f'The primary device under test is "{ads[0].serial}".')
+
+    for ad in ads:
+        if not ad.is_connected():
+            raise errors.AndroidDeviceError(
+                ("Android device %s is specified in config" " but is not attached.")
+                % ad.serial,
+                serial=ad.serial,
+            )
+    _start_services_on_ads(ads)
+    for ad in ads:
+        if ad.droid:
+            utils.set_location_service(ad, False)
+            utils.sync_device_time(ad)
+    return ads
+
+
+def destroy(ads):
+    """Cleans up AndroidDevice objects.
+
+    Args:
+        ads: A list of AndroidDevice objects.
+    """
+    for ad in ads:
+        try:
+            ad.clean_up()
+        except Exception:
+            ad.log.exception("Failed to clean up properly.")
+
+
+def get_info(ads):
+    """Get information on a list of AndroidDevice objects.
+
+    Args:
+        ads: A list of AndroidDevice objects.
+
+    Returns:
+        A list of dicts, each representing info for an AndroidDevice object.
+    """
+    device_info = []
+    for ad in ads:
+        info = {"serial": ad.serial, "model": ad.model}
+        info.update(ad.build_info)
+        device_info.append(info)
+    return device_info
+
+
+def _start_services_on_ads(ads):
+    """Starts long running services on multiple AndroidDevice objects.
+
+    If any one AndroidDevice object fails to start services, cleans up all
+    existing AndroidDevice objects and their services.
+
+    Args:
+        ads: A list of AndroidDevice objects whose services to start.
+    """
+    running_ads = []
+    for ad in ads:
+        running_ads.append(ad)
+        try:
+            ad.start_services()
+        except:
+            ad.log.exception("Failed to start some services, abort!")
+            destroy(running_ads)
+            raise
+
+
+def _parse_device_list(device_list_str, key):
+    """Parses a byte string representing a list of devices. The string is
+    generated by calling either adb or fastboot.
+
+    Args:
+        device_list_str: Output of adb or fastboot.
+        key: The token that signifies a device in device_list_str.
+
+    Returns:
+        A list of android device serial numbers.
+    """
+    return re.findall(r"(\S+)\t%s" % key, device_list_str)
+
+
+def list_adb_devices():
+    """List all android devices connected to the computer that are detected by
+    adb.
+
+    Returns:
+        A list of android device serials. Empty if there's none.
+    """
+    out = adb.AdbProxy().devices()
+    return _parse_device_list(out, "device")
+
+
+def list_fastboot_devices():
+    """List all android devices connected to the computer that are in in
+    fastboot mode. These are detected by fastboot.
+
+    Returns:
+        A list of android device serials. Empty if there's none.
+    """
+    out = fastboot.FastbootProxy().devices()
+    return _parse_device_list(out, "fastboot")
+
+
+def get_instances(serials):
+    """Create AndroidDevice instances from a list of serials.
+
+    Args:
+        serials: A list of android device serials.
+
+    Returns:
+        A list of AndroidDevice objects.
+    """
+    results = []
+    for s in serials:
+        results.append(AndroidDevice(s))
+    return results
+
+
+def get_instances_with_configs(configs):
+    """Create AndroidDevice instances from a list of json configs.
+
+    Each config should have the required key-value pair "serial".
+
+    Args:
+        configs: A list of dicts each representing the configuration of one
+            android device.
+
+    Returns:
+        A list of AndroidDevice objects.
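+
+    Example config (only "serial" is required; the other values shown are
+    illustrative):
+        >>> configs = [{"serial": "ABC123", "sl4a_client_port": 8080}]
+        >>> ads = get_instances_with_configs(configs)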
+    """
+    results = []
+    for c in configs:
+        try:
+            serial = c.pop("serial")
+        except KeyError:
+            raise errors.AndroidDeviceConfigError(
+                f"Required value 'serial' is missing in AndroidDevice config {c}."
+            )
+        client_port = 0
+        if ANDROID_DEVICE_SL4A_CLIENT_PORT_KEY in c:
+            try:
+                client_port = int(c.pop(ANDROID_DEVICE_SL4A_CLIENT_PORT_KEY))
+            except ValueError:
+                raise errors.AndroidDeviceConfigError(
+                    "'%s' is not a valid number for config %s"
+                    % (ANDROID_DEVICE_SL4A_CLIENT_PORT_KEY, c)
+                )
+        server_port = None
+        if ANDROID_DEVICE_SL4A_SERVER_PORT_KEY in c:
+            try:
+                server_port = int(c.pop(ANDROID_DEVICE_SL4A_SERVER_PORT_KEY))
+            except ValueError:
+                raise errors.AndroidDeviceConfigError(
+                    "'%s' is not a valid number for config %s"
+                    % (ANDROID_DEVICE_SL4A_SERVER_PORT_KEY, c)
+                )
+        forwarded_port = 0
+        if ANDROID_DEVICE_SL4A_FORWARDED_PORT_KEY in c:
+            try:
+                forwarded_port = int(c.pop(ANDROID_DEVICE_SL4A_FORWARDED_PORT_KEY))
+            except ValueError:
+                raise errors.AndroidDeviceConfigError(
+                    "'%s' is not a valid number for config %s"
+                    % (ANDROID_DEVICE_SL4A_FORWARDED_PORT_KEY, c)
+                )
+        ssh_config = c.pop("ssh_config", None)
+        ssh_connection = None
+        if ssh_config is not None:
+            ssh_settings = settings.from_config(ssh_config)
+            ssh_connection = connection.SshConnection(ssh_settings)
+        ad = AndroidDevice(
+            serial,
+            ssh_connection=ssh_connection,
+            client_port=client_port,
+            forwarded_port=forwarded_port,
+            server_port=server_port,
+        )
+        ad.load_config(c)
+        results.append(ad)
+    return results
+
+
+def get_all_instances(include_fastboot=False):
+    """Create AndroidDevice instances for all attached android devices.
+
+    Args:
+        include_fastboot: Whether to include devices in bootloader mode or not.
+
+    Returns:
+        A list of AndroidDevice objects each representing an android device
+        attached to the computer.
+    """
+    if include_fastboot:
+        serial_list = list_adb_devices() + list_fastboot_devices()
+        return get_instances(serial_list)
+    return get_instances(list_adb_devices())
+
+
+def filter_devices(ads, func):
+    """Finds the AndroidDevice instances from a list that match certain
+    conditions.
+
+    Args:
+        ads: A list of AndroidDevice instances.
+        func: A function that takes an AndroidDevice object and returns True
+            if the device satisfies the filter condition.
+
+    Returns:
+        A list of AndroidDevice instances that satisfy the filter condition.
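+
+    Example (hypothetical predicate):
+        >>> rooted = filter_devices(ads, lambda ad: ad.is_adb_root)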
+    """
+    results = []
+    for ad in ads:
+        if func(ad):
+            results.append(ad)
+    return results
+
+
+def get_device(ads, **kwargs):
+    """Finds a unique AndroidDevice instance from a list that has specific
+    attributes of certain values.
+
+    Example:
+        get_device(android_devices, label="foo", phone_number="1234567890")
+        get_device(android_devices, model="angler")
+
+    Args:
+        ads: A list of AndroidDevice instances.
+        kwargs: keyword arguments used to filter AndroidDevice instances.
+
+    Returns:
+        The target AndroidDevice instance.
+
+    Raises:
+        ValueError: raised if no device or more than one device matches the
+        given conditions.
+    """
+
+    def _get_device_filter(ad):
+        for k, v in kwargs.items():
+            if not hasattr(ad, k):
+                return False
+            elif getattr(ad, k) != v:
+                return False
+        return True
+
+    filtered = filter_devices(ads, _get_device_filter)
+    if not filtered:
+        raise ValueError(
+            f"Could not find a target device that matches condition: {kwargs}."
+        )
+    elif len(filtered) == 1:
+        return filtered[0]
+    else:
+        serials = [ad.serial for ad in filtered]
+        raise ValueError(f"More than one device matched: {serials}")
+
+
+def take_bug_reports(ads, test_name, begin_time):
+    """Takes bug reports on a list of android devices.
+
+    If you want to take a bug report, call this function with a list of
+    android_device objects in on_fail. Bug reports will be taken on all the
+    devices in the list concurrently. Taking a bug report is relatively slow,
+    so use this cautiously.
+
+    Args:
+        ads: A list of AndroidDevice instances.
+        test_name: Name of the test case that triggered this bug report.
+        begin_time: Logline format timestamp taken when the test started.
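+
+    Example (typically called from a test class's on_fail hook; the test name
+    and begin_time are illustrative):
+        >>> take_bug_reports(ads, "test_wifi_connect", begin_time)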
+    """
+
+    def take_br(test_name, begin_time, ad):
+        ad.take_bug_report(test_name, begin_time)
+
+    args = [(test_name, begin_time, ad) for ad in ads]
+    utils.concurrent_exec(take_br, args)
+
+
+class AndroidDevice:
+    """Class representing an android device.
+
+    Each object of this class represents one Android device in ACTS, including
+    handles to adb, fastboot, and sl4a clients. In addition to direct adb
+    commands, this object also uses adb port forwarding to talk to the Android
+    device.
+
+    Attributes:
+        serial: A string that's the serial number of the Android device.
+        log_path: A string that is the path where all logs collected on this
+                  android device should be stored.
+        log: A logger adapted from root logger with added token specific to an
+             AndroidDevice instance.
+        adb_logcat_process: A process that collects the adb logcat.
+        adb: An AdbProxy object used for interacting with the device via adb.
+        fastboot: A FastbootProxy object used for interacting with the device
+                  via fastboot.
+        client_port: Preferred client port number on the PC host side for SL4A
+        forwarded_port: Preferred server port number forwarded from Android
+                        to the host PC via adb for SL4A connections
+        server_port: Preferred server port used by SL4A on Android device
+
+    """
+
+    def __init__(
+        self,
+        serial: str = "",
+        ssh_connection: Runner | None = None,
+        client_port: int = 0,
+        forwarded_port: int = 0,
+        server_port: int | None = None,
+    ):
+        self.serial = serial
+        # logging.log_path only exists when this is used in an ACTS test run.
+        log_path_base = getattr(logging, "log_path", "/tmp/logs")
+        self.log_dir = f"AndroidDevice{serial}"
+        self.log_path = os.path.join(log_path_base, self.log_dir)
+        self.client_port = client_port
+        self.forwarded_port = forwarded_port
+        self.server_port = server_port
+        self.log = tracelogger.TraceLogger(
+            AndroidDeviceLoggerAdapter(logging.getLogger(), {"serial": serial})
+        )
+        self._event_dispatchers = {}
+        self._services = []
+        self.register_service(services.AdbLogcatService(self))
+        self.register_service(services.Sl4aService(self))
+        self.adb_logcat_process = None
+        self.adb = adb.AdbProxy(serial, ssh_connection=ssh_connection)
+        self.fastboot = fastboot.FastbootProxy(serial, ssh_connection=ssh_connection)
+        if not self.is_bootloader:
+            self.root_adb()
+        self._ssh_connection = ssh_connection
+        self.skip_sl4a = False
+        self.crash_report = None
+        self.data_accounting = collections.defaultdict(int)
+        self._sl4a_manager = sl4a_manager.create_sl4a_manager(self.adb)
+        self.last_logcat_timestamp = None
+        # Device info cache.
+        self._user_added_device_info = {}
+        self._sdk_api_level = None
+
+    def clean_up(self):
+        """Cleans up the AndroidDevice object and releases any resources it
+        claimed.
+        """
+        self.stop_services()
+        for service in self._services:
+            service.unregister()
+        self._services.clear()
+        if self._ssh_connection:
+            self._ssh_connection.close()
+
+    def recreate_services(self, serial):
+        """Clean up the AndroidDevice object and re-create adb/sl4a services.
+
+        Unregister the existing services and re-create adb and sl4a services,
+        call this method when the connection break after certain API call
+        (e.g., enable USB tethering by #startTethering)
+
+        Args:
+            serial: the serial number of the AndroidDevice
+        """
+        # Clean the old services
+        for service in self._services:
+            service.unregister()
+        self._services.clear()
+        if self._ssh_connection:
+            self._ssh_connection.close()
+        self._sl4a_manager.stop_service()
+
+        # Wait for old services to stop
+        time.sleep(5)
+
+        # Re-create the new adb and sl4a services
+        self.register_service(services.AdbLogcatService(self))
+        self.register_service(services.Sl4aService(self))
+        self.adb.wait_for_device()
+        self.terminate_all_sessions()
+        self.start_services()
+
+    def register_service(self, service):
+        """Registers the service on the device."""
+        service.register()
+        self._services.append(service)
+
+    # TODO(angli): This function shall be refactored to accommodate all services
+    # and not have hard coded switch for SL4A when b/29157104 is done.
+    def start_services(self, skip_setup_wizard=True):
+        """Starts long running services on the android device.
+
+        1. Start adb logcat capture.
+        2. Start SL4A if not skipped.
+
+        Args:
+            skip_setup_wizard: Whether or not to skip the setup wizard.
+        """
+        if skip_setup_wizard:
+            self.exit_setup_wizard()
+
+        event_bus.post(android_events.AndroidStartServicesEvent(self))
+
+    def stop_services(self):
+        """Stops long running services on the android device.
+
+        Stops adb logcat and terminates sl4a sessions if they exist.
+        """
+        event_bus.post(
+            android_events.AndroidStopServicesEvent(self), ignore_errors=True
+        )
+
+    def is_connected(self):
+        out = self.adb.devices()
+        devices = _parse_device_list(out, "device")
+        return self.serial in devices
+
+    @property
+    def build_info(self):
+        """Get the build info of this Android device, including build id and
+        build type.
+
+        This is not available if the device is in bootloader mode.
+
+        Returns:
+            A dict with the build info of this Android device, or None if the
+            device is in bootloader mode.
+        """
+        if self.is_bootloader:
+            self.log.error("Device is in fastboot mode, could not get build " "info.")
+            return
+
+        build_id = self.adb.getprop("ro.build.id")
+        incremental_build_id = self.adb.getprop("ro.build.version.incremental")
+        valid_build_id = False
+        for regex in RELEASE_ID_REGEXES:
+            if re.match(regex, build_id):
+                valid_build_id = True
+                break
+        if not valid_build_id:
+            build_id = incremental_build_id
+
+        info = {
+            "build_id": build_id,
+            "incremental_build_id": incremental_build_id,
+            "build_type": self.adb.getprop("ro.build.type"),
+        }
+        return info
+
+    @property
+    def device_info(self):
+        """Information to be pulled into controller info.
+
+        The latest serial, model, and build_info are included. Additional info
+        can be added via `add_device_info`.
+        """
+        info = {
+            "serial": self.serial,
+            "model": self.model,
+            "build_info": self.build_info,
+            "user_added_info": self._user_added_device_info,
+            "flavor": self.flavor,
+        }
+        return info
+
+    def add_device_info(self, name, info):
+        """Add custom device info to the user_added_info section.
+
+        Adding the same info name the second time will override existing info.
+
+        Args:
+          name: string, name of this info.
+          info: serializable, content of the info.
+        """
+        self._user_added_device_info.update({name: info})
+
+    def sdk_api_level(self):
+        if self._sdk_api_level is not None:
+            return self._sdk_api_level
+        if self.is_bootloader:
+            self.log.error("Device is in fastboot mode. Cannot get build info.")
+            return
+        self._sdk_api_level = int(self.adb.shell("getprop ro.build.version.sdk"))
+        return self._sdk_api_level
+
+    @property
+    def is_bootloader(self):
+        """True if the device is in bootloader mode."""
+        return self.serial in list_fastboot_devices()
+
+    @property
+    def is_adb_root(self):
+        """True if adb is running as root for this device."""
+        try:
+            return "0" == self.adb.shell("id -u")
+        except AdbError:
+            # Wait a bit and retry to work around adb flakiness for this cmd.
+            time.sleep(0.2)
+            return "0" == self.adb.shell("id -u")
+
+    @property
+    def model(self):
+        """The Android code name for the device."""
+        # If device is in bootloader mode, get mode name from fastboot.
+        if self.is_bootloader:
+            out = self.fastboot.getvar("product").strip()
+            # "out" is never empty because of the "total time" message fastboot
+            # writes to stderr.
+            lines = out.split("\n", 1)
+            if lines:
+                tokens = lines[0].split(" ")
+                if len(tokens) > 1:
+                    return tokens[1].lower()
+            return None
+        model = self.adb.getprop("ro.build.product").lower()
+        if model == "sprout":
+            return model
+        else:
+            return self.adb.getprop("ro.product.name").lower()
+
+    @property
+    def flavor(self):
+        """Returns the specific flavor of Android build the device is using."""
+        return self.adb.getprop("ro.build.flavor").lower()
+
+    @property
+    def droid(self):
+        """Returns the RPC Service of the first Sl4aSession created."""
+        if len(self._sl4a_manager.sessions) > 0:
+            session_id = sorted(self._sl4a_manager.sessions.keys())[0]
+            return self._sl4a_manager.sessions[session_id].rpc_client
+        else:
+            return None
+
+    @property
+    def ed(self):
+        """Returns the event dispatcher of the first Sl4aSession created."""
+        if len(self._sl4a_manager.sessions) > 0:
+            session_id = sorted(self._sl4a_manager.sessions.keys())[0]
+            return self._sl4a_manager.sessions[session_id].get_event_dispatcher()
+        else:
+            return None
+
+    @property
+    def sl4a_sessions(self):
+        """Returns a dictionary of session ids to sessions."""
+        return list(self._sl4a_manager.sessions)
+
+    @property
+    def is_adb_logcat_on(self):
+        """Whether there is an ongoing adb logcat collection."""
+        if self.adb_logcat_process:
+            if self.adb_logcat_process.is_running():
+                return True
+            else:
+                # if skip_sl4a is true, there is no sl4a session
+                # if logcat died due to device reboot and sl4a session has
+                # not restarted there is no droid.
+                if self.droid:
+                    self.droid.logI("Logcat died")
+                self.log.info("Logcat to %s died", self.log_path)
+                return False
+        return False
+
+    @property
+    def device_log_path(self):
+        """Returns the directory for all Android device logs for the current
+        test context and serial.
+        """
+        return context.get_current_context().get_full_output_path(self.serial)
+
+    def update_sdk_api_level(self):
+        self._sdk_api_level = None
+        self.sdk_api_level()
+
+    def load_config(self, config):
+        """Add attributes to the AndroidDevice object based on json config.
+
+        Args:
+            config: A dictionary representing the configs.
+
+        Raises:
+            AndroidDeviceError is raised if the config is trying to overwrite
+            an existing attribute.
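+
+        Example (hypothetical custom keys):
+            >>> ad.load_config({"label": "dut", "phone_number": "1234567890"})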
+        """
+        for k, v in config.items():
+            # skip_sl4a value can be reset from config file
+            if hasattr(self, k) and k != "skip_sl4a":
+                raise errors.AndroidDeviceError(
+                    f"Attempting to set existing attribute {k} on {self.serial}",
+                    serial=self.serial,
+                )
+            setattr(self, k, v)
+
+    def root_adb(self):
+        """Change adb to root mode for this device if allowed.
+
+        If executed on a production build, adb will not be switched to root
+        mode per security restrictions.
+        """
+        if self.is_adb_root:
+            return
+
+        for attempt in range(1, ADB_ROOT_RETRY_COUNT + 1):
+            try:
+                self.log.debug(f"Enabling ADB root mode: attempt {attempt}.")
+                self.adb.root()
+                break
+            except AdbError:
+                # Only re-raise after the final retry has failed.
+                if attempt == ADB_ROOT_RETRY_COUNT:
+                    raise
+                time.sleep(ADB_ROOT_RETRY_INTERVAL)
+        self.adb.wait_for_device()
+
+    def get_droid(self, handle_event=True):
+        """Create an sl4a connection to the device.
+
+        Return the connection handler 'droid'. By default, another connection
+        on the same session is made for EventDispatcher, and the dispatcher is
+        returned to the caller as well.
+        If sl4a server is not started on the device, try to start it.
+
+        Args:
+            handle_event: True if this droid session will need to handle
+                events.
+
+        Returns:
+            droid: Android object used to communicate with sl4a on the android
+                device.
+            ed: An optional EventDispatcher to organize events for this droid.
+
+        Examples:
+            Don't need event handling:
+            >>> ad = AndroidDevice()
+            >>> droid = ad.get_droid(False)
+
+            Need event handling:
+            >>> ad = AndroidDevice()
+            >>> droid, ed = ad.get_droid()
+        """
+        self.log.debug(
+            "Creating RPC client_port={}, forwarded_port={}, server_port={}".format(
+                self.client_port, self.forwarded_port, self.server_port
+            )
+        )
+        session = self._sl4a_manager.create_session(
+            client_port=self.client_port,
+            forwarded_port=self.forwarded_port,
+            server_port=self.server_port,
+        )
+        droid = session.rpc_client
+        if handle_event:
+            ed = session.get_event_dispatcher()
+            return droid, ed
+        return droid
+
+    def get_package_pid(self, package_name):
+        """Gets the pid for a given package. Returns None if not running.
+        Args:
+            package_name: The name of the package.
+        Returns:
+            The first pid found under a given package name. None if no process
+            was found running the package.
+        Raises:
+            AndroidDeviceError if the output of the phone's process list was
+            in an unexpected format.
+        """
+        for cmd in ("ps -A", "ps"):
+            try:
+                out = self.adb.shell(
+                    f'{cmd} | grep "S {package_name}"', ignore_status=True
+                )
+                if package_name not in out:
+                    continue
+                try:
+                    pid = int(out.split()[1])
+                    self.log.info("apk %s has pid %s.", package_name, pid)
+                    return pid
+                except (IndexError, ValueError) as e:
+                    # Possible ValueError from string to int cast.
+                    # Possible IndexError from split.
+                    self.log.warning(
+                        'Command "%s" returned output line: "%s".\nError: %s',
+                        cmd,
+                        out,
+                        e,
+                    )
+            except Exception as e:
+                self.log.warning(
+                    'Failed to check if %s is running with "%s".\nException: %s',
+                    package_name,
+                    cmd,
+                    e,
+                )
+        self.log.debug("apk %s is not running", package_name)
+        return None
+
+    def get_dispatcher(self, droid):
+        """Return an EventDispatcher for an sl4a session
+
+        Args:
+            droid: Session to create EventDispatcher for.
+
+        Returns:
+            ed: An EventDispatcher for specified session.
+        """
+        return self._sl4a_manager.sessions[droid.uid].get_event_dispatcher()
+
+    def _is_timestamp_in_range(self, target, log_begin_time, log_end_time):
+        low = acts_logger.logline_timestamp_comparator(log_begin_time, target) <= 0
+        high = acts_logger.logline_timestamp_comparator(log_end_time, target) >= 0
+        return low and high
+
+    def cat_adb_log(self, tag, begin_time, end_time=None, dest_path="AdbLogExcerpts"):
+        """Takes an excerpt of the adb logcat log from a certain time point to
+        current time.
+
+        Args:
+            tag: An identifier of the time period, usually the name of a test.
+            begin_time: Epoch time of the beginning of the time period.
+            end_time: Epoch time of the ending of the time period, default None
+            dest_path: Destination path of the excerpt file.
+
+        Returns:
+            The full path of the excerpt file, or None if the logcat file
+            does not exist.
+        """
+        log_begin_time = acts_logger.epoch_to_log_line_timestamp(begin_time)
+        if end_time is None:
+            log_end_time = acts_logger.get_log_line_timestamp()
+        else:
+            log_end_time = acts_logger.epoch_to_log_line_timestamp(end_time)
+        self.log.debug("Extracting adb log from logcat.")
+        logcat_path = os.path.join(
+            self.device_log_path, f"adblog_{self.serial}_debug.txt"
+        )
+        if not os.path.exists(logcat_path):
+            self.log.warning(f"Logcat file {logcat_path} does not exist.")
+            return
+        adb_excerpt_dir = os.path.join(self.log_path, dest_path)
+        os.makedirs(adb_excerpt_dir, exist_ok=True)
+        out_name = "%s,%s.txt" % (
+            acts_logger.normalize_log_line_timestamp(log_begin_time),
+            self.serial,
+        )
+        tag_len = utils.MAX_FILENAME_LEN - len(out_name)
+        out_name = f"{tag[:tag_len]},{out_name}"
+        adb_excerpt_path = os.path.join(adb_excerpt_dir, out_name)
+        with open(adb_excerpt_path, "w", encoding="utf-8") as out:
+            in_file = logcat_path
+            with open(in_file, "r", encoding="utf-8", errors="replace") as f:
+                while True:
+                    line = None
+                    try:
+                        line = f.readline()
+                        if not line:
+                            break
+                    except Exception:
+                        continue
+                    line_time = line[: acts_logger.log_line_timestamp_len]
+                    if not acts_logger.is_valid_logline_timestamp(line_time):
+                        continue
+                    if self._is_timestamp_in_range(
+                        line_time, log_begin_time, log_end_time
+                    ):
+                        if not line.endswith("\n"):
+                            line += "\n"
+                        out.write(line)
+        return adb_excerpt_path
+
+    def search_logcat(
+        self, matching_string, begin_time=None, end_time=None, logcat_path=None
+    ):
+        """Search logcat message with given string.
+
+        Args:
+            matching_string: matching_string to search.
+            begin_time: only the lines with time stamps later than begin_time
+                will be searched.
+            end_time: only the lines with time stamps earlier than end_time
+                will be searched.
+            logcat_path: the path of a specific file in which the search should
+                be performed. If None the path will be the default device log
+                path.
+
+        Returns:
+            A list of dictionaries with full log message, time stamp string,
+            time object and message ID. For example:
+            [{"log_message": "05-03 17:39:29.898   968  1001 D"
+                              "ActivityManager: Sending BOOT_COMPLETE user #0",
+              "time_stamp": "2017-05-03 17:39:29.898",
+              "datetime_obj": datetime object,
+              "message_id": None}]
+
+            [{"log_message": "08-12 14:26:42.611043  2360  2510 D RILJ    : "
+                             "[0853]< DEACTIVATE_DATA_CALL  [PHONE0]",
+              "time_stamp": "2020-08-12 14:26:42.611043",
+              "datetime_obj": datetime object},
+              "message_id": "0853"}]
+        """
+        if not logcat_path:
+            logcat_path = os.path.join(
+                self.device_log_path, f"adblog_{self.serial}_debug.txt"
+            )
+        if not os.path.exists(logcat_path):
+            self.log.warning(f"Logcat file {logcat_path} does not exist.")
+            return
+        output = job.run(f"grep '{matching_string}' {logcat_path}", ignore_status=True)
+        if not output.stdout or output.exit_status != 0:
+            return []
+        if begin_time:
+            if not isinstance(begin_time, datetime):
+                log_begin_time = acts_logger.epoch_to_log_line_timestamp(begin_time)
+                begin_time = datetime.strptime(log_begin_time, "%Y-%m-%d %H:%M:%S.%f")
+        if end_time:
+            if not isinstance(end_time, datetime):
+                log_end_time = acts_logger.epoch_to_log_line_timestamp(end_time)
+                end_time = datetime.strptime(log_end_time, "%Y-%m-%d %H:%M:%S.%f")
+        result = []
+        logs = re.findall(r"(\S+\s\S+)(.*)", output.stdout)
+        for log in logs:
+            time_stamp = log[0]
+            time_obj = datetime.strptime(time_stamp, "%Y-%m-%d %H:%M:%S.%f")
+
+            if begin_time and time_obj < begin_time:
+                continue
+
+            if end_time and time_obj > end_time:
+                continue
+
+            res = re.findall(r".*\[(\d+)\]", log[1])
+            try:
+                message_id = res[0]
+            except IndexError:
+                message_id = None
+
+            result.append(
+                {
+                    "log_message": "".join(log),
+                    "time_stamp": time_stamp,
+                    "datetime_obj": time_obj,
+                    "message_id": message_id,
+                }
+            )
+        return result
+
+    def start_adb_logcat(self):
+        """Starts a standing adb logcat collection in separate subprocesses and
+        save the logcat in a file.
+        """
+        if self.is_adb_logcat_on:
+            self.log.warning(
+                "Android device %s already has a running adb logcat thread. "
+                % self.serial
+            )
+            return
+        # Disable adb log spam filter. Have to stop and clear settings first
+        # because 'start' doesn't support --clear option before Android N.
+        self.adb.shell("logpersist.stop --clear", ignore_status=True)
+        self.adb.shell("logpersist.start", ignore_status=True)
+        if hasattr(self, "adb_logcat_param"):
+            extra_params = self.adb_logcat_param
+        else:
+            extra_params = "-b all"
+
+        self.adb_logcat_process = logcat.create_logcat_keepalive_process(
+            self.serial, self.log_dir, extra_params
+        )
+        self.adb_logcat_process.start()
+
+    def stop_adb_logcat(self):
+        """Stops the adb logcat collection subprocess."""
+        if not self.is_adb_logcat_on:
+            self.log.warning(
+                f"Android device {self.serial} does not have an ongoing adb logcat "
+            )
+            return
+        # Set the last timestamp to the current timestamp. This may cause
+        # a race condition that allows the same line to be logged twice,
+        # but it does not pose a problem for our logging purposes.
+        self.adb_logcat_process.stop()
+        self.adb_logcat_process = None
+
+    def get_apk_uid(self, apk_name):
+        """Get the uid of the given apk.
+
+        Args:
+            apk_name: Name of the package, e.g., com.android.phone.
+
+        Returns:
+            Linux UID for the apk, or None if not found.
+        """
+        output = self.adb.shell(
+            f"dumpsys package {apk_name} | grep userId=", ignore_status=True
+        )
+        result = re.search(r"userId=(\d+)", output)
+        if result:
+            return result.group(1)
+        else:
+            return None
+
+    def get_apk_version(self, package_name):
+        """Get the version of the given apk.
+
+        Args:
+            package_name: Name of the package, e.g., com.android.phone.
+
+        Returns:
+            Version of the given apk.
+        """
+        try:
+            output = self.adb.shell(
+                f"dumpsys package {package_name} | grep versionName"
+            )
+            pattern = re.compile(r"versionName=(.+)", re.I)
+            result = pattern.findall(output)
+            if result:
+                return result[0]
+        except Exception as e:
+            self.log.warning(
+                "Fail to get the version of package %s: %s", package_name, e
+            )
+        self.log.debug("apk %s is not found", package_name)
+        return None
+
+    def is_apk_installed(self, package_name):
+        """Check if the given apk is already installed.
+
+        Args:
+            package_name: Name of the package, e.g., com.android.phone.
+
+        Returns:
+            True if the package is installed. False otherwise.
+        """
+
+        try:
+            return bool(
+                self.adb.shell(
+                    f'(pm list packages | grep -w "package:{package_name}") || true'
+                )
+            )
+
+        except Exception as err:
+            self.log.error(
+                "Could not determine if %s is installed. " "Received error:\n%s",
+                package_name,
+                err,
+            )
+            return False
+
+    def is_sl4a_installed(self):
+        return self.is_apk_installed(SL4A_APK_NAME)
+
+    def is_apk_running(self, package_name):
+        """Check if the given apk is running.
+
+        Args:
+            package_name: Name of the package, e.g., com.android.phone.
+
+        Returns:
+            True if the package is running. False otherwise.
+        """
+        for cmd in ("ps -A", "ps"):
+            try:
+                out = self.adb.shell(
+                    f'{cmd} | grep "S {package_name}"', ignore_status=True
+                )
+                if package_name in out:
+                    self.log.info("apk %s is running", package_name)
+                    return True
+            except Exception as e:
+                self.log.warning(
+                    "Device fails to check is %s running by %s " "Exception %s",
+                    package_name,
+                    cmd,
+                    e,
+                )
+                continue
+        self.log.debug("apk %s is not running", package_name)
+        return False
+
+    def is_sl4a_running(self):
+        return self.is_apk_running(SL4A_APK_NAME)
+
+    def force_stop_apk(self, package_name):
+        """Force stop the given apk.
+
+        Args:
+            package_name: Name of the package, e.g., com.android.phone.
+        """
+        try:
+            self.adb.shell(f"am force-stop {package_name}", ignore_status=True)
+        except Exception as e:
+            self.log.warning("Fail to stop package %s: %s", package_name, e)
+
+    def take_bug_report(self, test_name=None, begin_time=None):
+        """Takes a bug report on the device and stores it in a file.
+
+        Args:
+            test_name: Name of the test case that triggered this bug report.
+            begin_time: Epoch time when the test started. If none is specified,
+                the current time will be used.
+        """
+        self.adb.wait_for_device(timeout=WAIT_FOR_DEVICE_TIMEOUT)
+        new_br = True
+        try:
+            stdout = self.adb.shell("bugreportz -v")
+            # This check is necessary for builds before N, where adb shell's ret
+            # code and stderr are not propagated properly.
+            if "not found" in stdout:
+                new_br = False
+        except AdbError:
+            new_br = False
+        br_path = self.device_log_path
+        os.makedirs(br_path, exist_ok=True)
+        epoch = begin_time if begin_time else utils.get_current_epoch_time()
+        time_stamp = acts_logger.normalize_log_line_timestamp(
+            acts_logger.epoch_to_log_line_timestamp(epoch)
+        )
+        out_name = f"AndroidDevice{self.serial}_{time_stamp}"
+        out_name = f"{out_name}.zip" if new_br else f"{out_name}.txt"
+        full_out_path = os.path.join(br_path, out_name)
+        # in case device restarted, wait for adb interface to return
+        self.wait_for_boot_completion()
+        if test_name:
+            self.log.info("Taking bugreport for %s.", test_name)
+        else:
+            self.log.info("Taking bugreport.")
+        if new_br:
+            out = self.adb.shell("bugreportz", timeout=BUG_REPORT_TIMEOUT)
+            if not out.startswith("OK"):
+                raise errors.AndroidDeviceError(
+                    f"Failed to take bugreport on {self.serial}: {out}",
+                    serial=self.serial,
+                )
+            br_out_path = out.split(":")[1].strip().split()[0]
+            self.adb.pull(f"{br_out_path} {full_out_path}")
+        else:
+            self.adb.bugreport(f" > {full_out_path}", timeout=BUG_REPORT_TIMEOUT)
+        if test_name:
+            self.log.info("Bugreport for %s taken at %s.", test_name, full_out_path)
+        else:
+            self.log.info("Bugreport taken at %s.", test_name, full_out_path)
+        self.adb.wait_for_device(timeout=WAIT_FOR_DEVICE_TIMEOUT)
+
+    def get_file_names(
+        self, directory, begin_time=None, skip_files=[], match_string=None
+    ):
+        """Get files names with provided directory."""
+        cmd = f"find {directory} -type f"
+        if begin_time:
+            current_time = utils.get_current_epoch_time()
+            seconds = int(math.ceil((current_time - begin_time) / 1000.0))
+            cmd = f"{cmd} -mtime -{seconds}s"
+        if match_string:
+            cmd = f"{cmd} -iname {match_string}"
+        for skip_file in skip_files:
+            cmd = f"{cmd} ! -iname {skip_file}"
+        out = self.adb.shell(cmd, ignore_status=True)
+        if (
+            not out
+            or "No such" in out
+            or "Permission denied" in out
+            or "Not a directory" in out
+        ):
+            return []
+        files = out.split("\n")
+        self.log.debug("Find files in directory %s: %s", directory, files)
+        return files
+
+    @property
+    def external_storage_path(self):
+        """
+        The $EXTERNAL_STORAGE path on the device. Most commonly set to '/sdcard'
+        """
+        return self.adb.shell("echo $EXTERNAL_STORAGE")
+
+    def file_exists(self, file_path):
+        """Returns whether a file exists on a device.
+
+        Args:
+            file_path: The path of the file to check for.
+        """
+        cmd = f"(test -f {file_path} && echo yes) || echo no"
+        result = self.adb.shell(cmd)
+        if result == "yes":
+            return True
+        elif result == "no":
+            return False
+        raise ValueError(
+            "Couldn't determine if %s exists. "
+            "Expected yes/no, got %s" % (file_path, result[cmd])
+        )
+
+    def pull_files(self, device_paths, host_path=None):
+        """Pull files from devices.
+
+        Args:
+            device_paths: List of paths on the device to pull from.
+            host_path: Destination path
+        """
+        if isinstance(device_paths, str):
+            device_paths = [device_paths]
+        if not host_path:
+            host_path = self.log_path
+        for device_path in device_paths:
+            self.log.info(f"Pull from device: {device_path} -> {host_path}")
+            self.adb.pull(f"{device_path} {host_path}", timeout=PULL_TIMEOUT)
+
+    def check_crash_report(
+        self, test_name=None, begin_time=None, log_crash_report=False
+    ):
+        """check crash report on the device."""
+        crash_reports = []
+        for crash_path in CRASH_REPORT_PATHS:
+            try:
+                cmd = f"cd {crash_path}"
+                self.adb.shell(cmd)
+            except Exception as e:
+                self.log.debug("received exception %s", e)
+                continue
+            crashes = self.get_file_names(
+                crash_path, skip_files=CRASH_REPORT_SKIPS, begin_time=begin_time
+            )
+            if crash_path == "/data/tombstones/" and crashes:
+                tombstones = crashes[:]
+                for tombstone in tombstones:
+                    if self.adb.shell(
+                        f'cat {tombstone} | grep "crash_dump failed to dump process"'
+                    ):
+                        crashes.remove(tombstone)
+            if crashes:
+                crash_reports.extend(crashes)
+        if crash_reports and log_crash_report:
+            crash_log_path = os.path.join(
+                self.device_log_path, f"Crashes_{self.serial}"
+            )
+            os.makedirs(crash_log_path, exist_ok=True)
+            self.pull_files(crash_reports, crash_log_path)
+        return crash_reports
+
+    def get_qxdm_logs(self, test_name="", begin_time=None):
+        """Get qxdm logs."""
+        # Sleep 10 seconds for the buffered log to be written in qxdm log file
+        time.sleep(10)
+        log_path = getattr(self, "qxdm_log_path", DEFAULT_QXDM_LOG_PATH)
+        qxdm_logs = self.get_file_names(
+            log_path, begin_time=begin_time, match_string="*.qmdl"
+        )
+        if qxdm_logs:
+            qxdm_log_path = os.path.join(self.device_log_path, f"QXDM_{self.serial}")
+            os.makedirs(qxdm_log_path, exist_ok=True)
+
+            self.log.info("Pull QXDM Log %s to %s", qxdm_logs, qxdm_log_path)
+            self.pull_files(qxdm_logs, qxdm_log_path)
+
+            self.adb.pull(
+                f"/firmware/image/qdsp6m.qdb {qxdm_log_path}",
+                timeout=PULL_TIMEOUT,
+                ignore_status=True,
+            )
+            # Zip Folder
+            utils.zip_directory(f"{qxdm_log_path}.zip", qxdm_log_path)
+            shutil.rmtree(qxdm_log_path)
+        else:
+            self.log.error(f"Didn't find QXDM logs in {log_path}.")
+        if "Verizon" in self.adb.getprop("gsm.sim.operator.alpha"):
+            omadm_log_path = os.path.join(self.device_log_path, f"OMADM_{self.serial}")
+            os.makedirs(omadm_log_path, exist_ok=True)
+            self.log.info("Pull OMADM Log")
+            self.adb.pull(
+                f"/data/data/com.android.omadm.service/files/dm/log/ {omadm_log_path}",
+                timeout=PULL_TIMEOUT,
+                ignore_status=True,
+            )
+
+    def get_sdm_logs(self, test_name="", begin_time=None):
+        """Get sdm logs."""
+        # Sleep 10 seconds for the buffered log to be written in sdm log file
+        time.sleep(10)
+        log_paths = [
+            ALWAYS_ON_LOG_PATH,
+            getattr(self, "sdm_log_path", DEFAULT_SDM_LOG_PATH),
+        ]
+        sdm_logs = []
+        for path in log_paths:
+            sdm_logs += self.get_file_names(
+                path, begin_time=begin_time, match_string="*.sdm*"
+            )
+        if sdm_logs:
+            sdm_log_path = os.path.join(self.device_log_path, f"SDM_{self.serial}")
+            os.makedirs(sdm_log_path, exist_ok=True)
+            self.log.info("Pull SDM Log %s to %s", sdm_logs, sdm_log_path)
+            self.pull_files(sdm_logs, sdm_log_path)
+        else:
+            self.log.error(f"Didn't find SDM logs in {log_paths}.")
+        if "Verizon" in self.adb.getprop("gsm.sim.operator.alpha"):
+            omadm_log_path = os.path.join(self.device_log_path, f"OMADM_{self.serial}")
+            os.makedirs(omadm_log_path, exist_ok=True)
+            self.log.info("Pull OMADM Log")
+            self.adb.pull(
+                f"/data/data/com.android.omadm.service/files/dm/log/ {omadm_log_path}",
+                timeout=PULL_TIMEOUT,
+                ignore_status=True,
+            )
+
+    def start_new_session(self, max_connections=None, server_port=None):
+        """Start a new session in sl4a.
+
+        Also caches the droid in a dict with its uid being the key.
+
+        Returns:
+            An Android object used to communicate with sl4a on the android
+                device.
+
+        Raises:
+            Sl4aException: Something is wrong with sl4a and it returned an
+            existing uid to a new session.
+        """
+        session = self._sl4a_manager.create_session(
+            max_connections=max_connections, server_port=server_port
+        )
+
+        self._sl4a_manager.sessions[session.uid] = session
+        return session.rpc_client
+
+    def terminate_all_sessions(self):
+        """Terminate all sl4a sessions on the AndroidDevice instance.
+
+        Terminate all sessions and clear caches.
+        """
+        self._sl4a_manager.terminate_all_sessions()
+
+    def run_iperf_client_nb(
+        self, server_host, extra_args="", timeout=IPERF_TIMEOUT, log_file_path=None
+    ):
+        """Start iperf client on the device asynchronously.
+
+        This command returns immediately and does not collect results; use
+        log_file_path to capture the iperf output on the device.
+
+        Args:
+            server_host: Address of the iperf server.
+            extra_args: A string representing extra arguments for iperf client,
+                e.g. "-i 1 -t 30".
+            log_file_path: The complete file path to log the results.
+
+        """
+        cmd = f"iperf3 -c {server_host} {extra_args}"
+        if log_file_path:
+            cmd += f" --logfile {log_file_path} &"
+        self.adb.shell_nb(cmd)
+
+    def run_iperf_client(self, server_host, extra_args="", timeout=IPERF_TIMEOUT):
+        """Start iperf client on the device.
+
+        Returns status as True if the iperf client started successfully,
+        along with the data flow information as results.
+
+        Args:
+            server_host: Address of the iperf server.
+            extra_args: A string representing extra arguments for iperf client,
+                e.g. "-i 1 -t 30".
+
+        Returns:
+            status: True if the iperf client started successfully.
+            results: A list of output lines containing data flow information.
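+
+        Example (illustrative server address and arguments):
+            >>> ok, lines = ad.run_iperf_client("192.168.1.1", "-i 1 -t 30")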
+        """
+        out = self.adb.shell(f"iperf3 -c {server_host} {extra_args}", timeout=timeout)
+        clean_out = out.split("\n")
+        if "error" in clean_out[0].lower():
+            return False, clean_out
+        return True, clean_out
+
+    def run_iperf_server(self, extra_args=""):
+        """Start iperf server on the device
+
+        Return status as true if iperf server started successfully.
+
+        Args:
+            extra_args: A string representing extra arguments for iperf server.
+
+        Returns:
+            status: True if the iperf server started successfully.
+            results: A list of output lines from the command.
+        """
+        out = self.adb.shell(f"iperf3 -s {extra_args}")
+        clean_out = out.split("\n")
+        if "error" in clean_out[0].lower():
+            return False, clean_out
+        return True, clean_out
+
+    def wait_for_boot_completion(self, timeout=900.0):
+        """Waits for Android framework to broadcast ACTION_BOOT_COMPLETED.
+
+        Args:
+            timeout: Seconds to wait for the device to boot. Default value is
+            15 minutes.
+        """
+        timeout_start = time.time()
+
+        self.log.debug("ADB waiting for device")
+        self.adb.wait_for_device(timeout=timeout)
+        self.log.debug("Waiting for  sys.boot_completed")
+        while time.time() < timeout_start + timeout:
+            try:
+                completed = self.adb.getprop("sys.boot_completed")
+                if completed == "1":
+                    self.log.debug("Device has rebooted")
+                    return
+            except AdbError:
+                # adb shell calls may fail during certain period of booting
+                # process, which is normal. Ignoring these errors.
+                pass
+            time.sleep(5)
+        raise errors.AndroidDeviceError(
+            f"Device {self.serial} booting process timed out.", serial=self.serial
+        )
+
+    def reboot(
+        self, stop_at_lock_screen=False, timeout=180, wait_after_reboot_complete=1
+    ):
+        """Reboots the device.
+
+        Terminate all sl4a sessions, reboot the device, wait for the device to
+        complete booting, and restart services (including sl4a unless skipped).
+
+        Args:
+            stop_at_lock_screen: If True, leave the device at the lock screen
+                after rebooting and skip restarting sl4a, since sl4a requires
+                the device to be unlocked.
+            timeout: time in seconds to wait for the device to complete
+                rebooting.
+            wait_after_reboot_complete: time in seconds to wait after the boot
+                completion.
+        """
+        if self.is_bootloader:
+            self.fastboot.reboot()
+            return
+        self.stop_services()
+        self.log.info("Rebooting")
+        self.adb.reboot()
+
+        timeout_start = time.time()
+        # b/111791239: Newer versions of android sometimes return early after
+        # `adb reboot` is called. This means subsequent calls may make it to
+        # the device before the reboot goes through, return false positives for
+        # getprops such as sys.boot_completed.
+        while time.time() < timeout_start + timeout:
+            try:
+                self.adb.get_state()
+                time.sleep(0.1)
+            except AdbError:
+                # get_state will raise an error if the device is not found. We
+                # want the device to be missing to prove the device has kicked
+                # off the reboot.
+                break
+        self.wait_for_boot_completion(timeout=(timeout - time.time() + timeout_start))
+
+        self.log.debug("Wait for a while after boot completion.")
+        time.sleep(wait_after_reboot_complete)
+        self.root_adb()
+        skip_sl4a = self.skip_sl4a
+        self.skip_sl4a = self.skip_sl4a or stop_at_lock_screen
+        self.start_services()
+        self.skip_sl4a = skip_sl4a
+
+    def restart_runtime(self):
+        """Restarts android runtime.
+
+        Terminates all sl4a sessions, restarts the runtime, waits for the
+        framework to complete restarting, and restarts an sl4a session if
+        restart_sl4a is True.
+        """
+        self.stop_services()
+        self.log.info("Restarting android runtime")
+        self.adb.shell("stop")
+        # Reset the boot completed flag before we restart the framework
+        # to correctly detect when the framework has fully come up.
+        self.adb.shell("setprop sys.boot_completed 0")
+        self.adb.shell("start")
+        self.wait_for_boot_completion()
+        self.root_adb()
+
+        self.start_services()
+
+    def get_ipv4_address(self, interface="wlan0", timeout=5):
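+        """Returns the IPv4 address of the given interface, or None if not found.
+
+        Retries the adb call once per second for up to `timeout` seconds before
+        giving up, then parses the address out of the ifconfig output.
+        """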
+        for timer in range(0, timeout):
+            try:
+                ip_string = self.adb.shell(f"ifconfig {interface}|grep inet")
+                break
+            except adb.AdbError as e:
+                if timer + 1 == timeout:
+                    self.log.warning(f"Unable to find IP address for {interface}.")
+                    return None
+                else:
+                    time.sleep(1)
+        result = re.search("addr:(.*) Bcast", ip_string)
+        if result is not None:
+            ip_address = result.group(1)
+            try:
+                socket.inet_aton(ip_address)
+                return ip_address
+            except socket.error:
+                return None
+        else:
+            return None
+
+    def get_ipv4_gateway(self, timeout=5):
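+        """Returns the device's IPv4 gateway, or None if not found."""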
+        for timer in range(0, timeout):
+            try:
+                gateway_string = self.adb.shell("dumpsys wifi | grep mDhcpResults")
+                break
+            except adb.AdbError as e:
+                if timer + 1 == timeout:
+                    self.log.warning("Unable to find gateway")
+                    return None
+                else:
+                    time.sleep(1)
+        result = re.search("Gateway (.*) DNS servers", gateway_string)
+        if result is not None:
+            ipv4_gateway = result.group(1)
+            try:
+                socket.inet_aton(ipv4_gateway)
+                return ipv4_gateway
+            except socket.error:
+                return None
+        else:
+            return None
+
+    def send_keycode(self, keycode):
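+        """Sends a key event for the given keycode, e.g. send_keycode("WAKEUP")."""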
+        self.adb.shell(f"input keyevent KEYCODE_{keycode}")
+
+    def get_my_current_focus_window(self):
+        """Get the current focus window on screen"""
+        output = self.adb.shell(
+            "dumpsys window displays | grep -E mCurrentFocus | grep -v null",
+            ignore_status=True,
+        )
+        if not output or "not found" in output or "Can't find" in output:
+            result = ""
+        else:
+            result = output.split(" ")[-1].strip("}")
+        self.log.debug("Current focus window is %s", result)
+        return result
+
+    def get_my_current_focus_app(self):
+        """Get the current focus application"""
+        dumpsys_cmd = [
+            "dumpsys window | grep -E mFocusedApp",
+            "dumpsys window displays | grep -E mFocusedApp",
+        ]
+        for cmd in dumpsys_cmd:
+            output = self.adb.shell(cmd, ignore_status=True)
+            if (
+                not output
+                or "not found" in output
+                or "Can't find" in output
+                or ("mFocusedApp=null" in output)
+            ):
+                result = ""
+            else:
+                result = output.split(" ")[-2]
+                break
+        self.log.debug("Current focus app is %s", result)
+        return result
+
+    def is_window_ready(self, window_name=None):
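+        """Returns True if the given window has focus, or, when no window name
+        is given, if focus is on any window other than the encryption window."""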
+        current_window = self.get_my_current_focus_window()
+        if window_name:
+            return window_name in current_window
+        return current_window and ENCRYPTION_WINDOW not in current_window
+
+    def wait_for_window_ready(
+        self, window_name=None, check_interval=5, check_duration=60
+    ):
+        elapsed_time = 0
+        while elapsed_time < check_duration:
+            if self.is_window_ready(window_name=window_name):
+                return True
+            time.sleep(check_interval)
+            elapsed_time += check_interval
+        self.log.info("Current focus window is %s", self.get_my_current_focus_window())
+        return False
+
+    def is_user_setup_complete(self):
+        return "1" in self.adb.shell("settings get secure user_setup_complete")
+
+    def is_screen_awake(self):
+        """Check if device screen is in sleep mode"""
+        return "Awake" in self.adb.shell("dumpsys power | grep mWakefulness=")
+
+    def is_screen_emergency_dialer(self):
+        """Check if device screen is in emergency dialer mode"""
+        return "EmergencyDialer" in self.get_my_current_focus_window()
+
+    def is_screen_in_call_activity(self):
+        """Check if device screen is in in-call activity notification"""
+        return "InCallActivity" in self.get_my_current_focus_window()
+
+    def is_setupwizard_on(self):
+        """Check if device screen is in emergency dialer mode"""
+        return "setupwizard" in self.get_my_current_focus_app()
+
+    def is_screen_lock_enabled(self):
+        """Check if screen lock is enabled"""
+        cmd = "dumpsys window policy | grep showing="
+        out = self.adb.shell(cmd, ignore_status=True)
+        return "true" in out
+
+    def is_waiting_for_unlock_pin(self):
+        """Check if device is waiting for unlock pin to boot up"""
+        current_window = self.get_my_current_focus_window()
+        current_app = self.get_my_current_focus_app()
+        if ENCRYPTION_WINDOW in current_window:
+            self.log.info("Device is in CrpytKeeper window")
+            return True
+        if "StatusBar" in current_window and (
+            (not current_app) or "FallbackHome" in current_app
+        ):
+            self.log.info("Device is locked")
+            return True
+        return False
+
+    def ensure_screen_on(self):
+        """Ensure device screen is powered on"""
+        if self.is_screen_lock_enabled():
+            for _ in range(2):
+                self.unlock_screen()
+                time.sleep(1)
+                if self.is_waiting_for_unlock_pin():
+                    self.unlock_screen(password=DEFAULT_DEVICE_PASSWORD)
+                    time.sleep(1)
+                if (
+                    not self.is_waiting_for_unlock_pin()
+                    and self.wait_for_window_ready()
+                ):
+                    return True
+            return False
+        else:
+            self.wakeup_screen()
+            return True
+
+    def wakeup_screen(self):
+        if not self.is_screen_awake():
+            self.log.info("Screen is not awake, wake it up")
+            self.send_keycode("WAKEUP")
+
+    def go_to_sleep(self):
+        if self.is_screen_awake():
+            self.send_keycode("SLEEP")
+
+    def send_keycode_number_pad(self, number):
+        self.send_keycode(f"NUMPAD_{number}")
+
+    def unlock_screen(self, password=None):
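+        """Unlocks the screen, optionally entering a numeric password on the
+        number pad when one is provided."""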
+        self.log.info("Unlocking with %s", password or "swipe up")
+        # Bring device to SLEEP so that unlock process can start fresh
+        self.send_keycode("SLEEP")
+        time.sleep(1)
+        self.send_keycode("WAKEUP")
+        if ENCRYPTION_WINDOW not in self.get_my_current_focus_app():
+            self.send_keycode("MENU")
+        if password:
+            self.send_keycode("DEL")
+            for number in password:
+                self.send_keycode_number_pad(number)
+            self.send_keycode("ENTER")
+            self.send_keycode("BACK")
+
+    def screenshot(self, name=""):
+        """Take a screenshot on the device.
+
+        Args:
+            name: additional information of screenshot on the file name.
+        """
+        if name:
+            file_name = f"{DEFAULT_SCREENSHOT_PATH}_{name}"
+        else:
+            file_name = DEFAULT_SCREENSHOT_PATH
+        file_name = f"{file_name}_{utils.get_current_epoch_time()}.png"
+        self.ensure_screen_on()
+        self.log.info("Log screenshot to %s", file_name)
+        try:
+            self.adb.shell(f"screencap -p {file_name}")
+        except Exception:
+            self.log.error("Failed to log screenshot to %s", file_name)
+
+    def exit_setup_wizard(self):
+        # Handling Android TV's setupwizard is ignored for now.
+        if "feature:android.hardware.type.television" in self.adb.shell(
+            "pm list features"
+        ):
+            return
+        if not self.is_user_setup_complete() or self.is_setupwizard_on():
+            # b/116709539 need this to prevent reboot after skip setup wizard
+            self.adb.shell(
+                "am start -a com.android.setupwizard.EXIT", ignore_status=True
+            )
+            self.adb.shell(
+                f"pm disable {self.get_setupwizard_package_name()}",
+                ignore_status=True,
+            )
+        # Wait up to 5 seconds for user_setup_complete to be updated
+        end_time = time.time() + 5
+        while time.time() < end_time:
+            if self.is_user_setup_complete() or not self.is_setupwizard_on():
+                return
+
+        # If fail to exit setup wizard, set local.prop and reboot
+        if not self.is_user_setup_complete() and self.is_setupwizard_on():
+            self.adb.shell("echo ro.test_harness=1 > /data/local.prop")
+            self.adb.shell("chmod 644 /data/local.prop")
+            self.reboot(stop_at_lock_screen=True)
+
+    def get_setupwizard_package_name(self):
+        """Finds setupwizard package/.activity
+
+        Bypass setupwizard or setupwraith depending on device.
+
+        Returns:
+            packageName/.ActivityName
+        """
+        packages_to_skip = "'setupwizard|setupwraith'"
+        android_package_name = "com.google.android"
+        package = self.adb.shell(
+            "pm list packages -f | grep -E {} | grep {}".format(
+                packages_to_skip, android_package_name
+            )
+        )
+        wizard_package = package.split("=")[1]
+        activity = package.split("=")[0].split("/")[-2]
+        self.log.info(f"{wizard_package}/.{activity}Activity")
+        return f"{wizard_package}/.{activity}Activity"
+
+    def push_system_file(self, src_file_path, dst_file_path, push_timeout=300):
+        """Pushes a file onto the read-only file system.
+
+        For speed, the device is left in root mode with verity disabled after
+        this call. To re-enable verity, call ensure_verity_enabled().
+
+        Args:
+            src_file_path: The path to the system app to install.
+            dst_file_path: The destination of the file.
+            push_timeout: How long to wait for the push to finish.
+        Returns:
+            Whether or not the install was successful.
+        """
+        self.adb.ensure_root()
+        try:
+            self.ensure_verity_disabled()
+            self.adb.remount()
+            out = self.adb.push(
+                f"{src_file_path} {dst_file_path}", timeout=push_timeout
+            )
+            if "error" in out:
+                self.log.error(
+                    "Unable to push system file %s to %s due to %s",
+                    src_file_path,
+                    dst_file_path,
+                    out,
+                )
+                return False
+            return True
+        except Exception as e:
+            self.log.error(
+                "Unable to push system file %s to %s due to %s",
+                src_file_path,
+                dst_file_path,
+                e,
+            )
+            return False
+
+    def ensure_verity_enabled(self):
+        """Ensures that verity is enabled.
+
+        If verity is not enabled, this call will reboot the phone. Note that
+        this only works on debuggable builds.
+        """
+        user = self.adb.get_user_id()
+        # The below properties will only exist if verity has been enabled.
+        system_verity = self.adb.getprop("partition.system.verified")
+        vendor_verity = self.adb.getprop("partition.vendor.verified")
+        if not system_verity or not vendor_verity:
+            self.adb.ensure_root()
+            self.adb.enable_verity()
+            self.reboot()
+            self.adb.ensure_user(user)
+
+    def ensure_verity_disabled(self):
+        """Ensures that verity is disabled.
+
+        If verity is enabled, this call will reboot the phone.
+        """
+        user = self.adb.get_user_id()
+        # The below properties will only exist if verity has been enabled.
+        system_verity = self.adb.getprop("partition.system.verified")
+        vendor_verity = self.adb.getprop("partition.vendor.verified")
+        if system_verity or vendor_verity:
+            self.adb.ensure_root()
+            self.adb.disable_verity()
+            self.reboot()
+            self.adb.ensure_user(user)
+
+
+class AndroidDeviceLoggerAdapter(logging.LoggerAdapter):
+    def process(self, msg, kwargs):
+        msg = f"[AndroidDevice|{self.extra['serial']}] {msg}"
+        return (msg, kwargs)
diff --git a/src/antlion/controllers/android_lib/__init__.py b/packages/antlion/controllers/android_lib/__init__.py
similarity index 100%
rename from src/antlion/controllers/android_lib/__init__.py
rename to packages/antlion/controllers/android_lib/__init__.py
diff --git a/src/antlion/controllers/android_lib/errors.py b/packages/antlion/controllers/android_lib/errors.py
similarity index 100%
rename from src/antlion/controllers/android_lib/errors.py
rename to packages/antlion/controllers/android_lib/errors.py
diff --git a/src/antlion/controllers/android_lib/events.py b/packages/antlion/controllers/android_lib/events.py
similarity index 100%
rename from src/antlion/controllers/android_lib/events.py
rename to packages/antlion/controllers/android_lib/events.py
diff --git a/packages/antlion/controllers/android_lib/logcat.py b/packages/antlion/controllers/android_lib/logcat.py
new file mode 100644
index 0000000..4aab7d0
--- /dev/null
+++ b/packages/antlion/controllers/android_lib/logcat.py
@@ -0,0 +1,104 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+import re
+
+from antlion.libs.logging import log_stream
+from antlion.libs.logging.log_stream import LogStyles
+from antlion.libs.proc.process import Process
+
+TIMESTAMP_REGEX = r"((?:\d+-)?\d+-\d+ \d+:\d+:\d+.\d+)"
+
+
+class TimestampTracker(object):
+    """Stores the last timestamp outputted by the Logcat process."""
+
+    def __init__(self):
+        self._last_timestamp = None
+
+    @property
+    def last_timestamp(self):
+        return self._last_timestamp
+
+    def read_output(self, message):
+        """Reads the message and parses all timestamps from it."""
+        all_timestamps = re.findall(TIMESTAMP_REGEX, message)
+        if len(all_timestamps) > 0:
+            self._last_timestamp = all_timestamps[0]
+
+
+def _get_log_level(message):
+    """Returns the log level for the given message."""
+    if message.startswith("-") or len(message) < 37:
+        return logging.ERROR
+    else:
+        log_level = message[36]
+        if log_level in ("V", "D"):
+            return logging.DEBUG
+        elif log_level == "I":
+            return logging.INFO
+        elif log_level == "W":
+            return logging.WARNING
+        elif log_level == "E":
+            return logging.ERROR
+    return logging.NOTSET
+
+
+def _log_line_func(log, timestamp_tracker):
+    """Returns a lambda that logs a message to the given logger."""
+
+    def log_line(message):
+        timestamp_tracker.read_output(message)
+        log.log(_get_log_level(message), message)
+
+    return log_line
+
+
+def _on_retry(serial, extra_params, timestamp_tracker):
+    def on_retry(_):
+        begin_at = '"%s"' % (timestamp_tracker.last_timestamp or 1)
+        additional_params = extra_params or ""
+
+        return f"adb -s {serial} logcat -T {begin_at} -v year {additional_params}"
+
+    return on_retry
+
+
+def create_logcat_keepalive_process(serial, logcat_dir, extra_params=""):
+    """Creates a Logcat Process that automatically attempts to reconnect.
+
+    Args:
+        serial: The serial of the device to read the logcat of.
+        logcat_dir: The directory used for logcat file output.
+        extra_params: Any additional params to be added to the logcat cmdline.
+
+    Returns:
+        An antlion.libs.proc.process.Process object.
+    """
+    logger = log_stream.create_logger(
+        f"adblog_{serial}",
+        log_name=serial,
+        subcontext=logcat_dir,
+        log_styles=(LogStyles.LOG_DEBUG | LogStyles.TESTCASE_LOG),
+    )
+    process = Process(f"adb -s {serial} logcat -T 1 -v year {extra_params}")
+    timestamp_tracker = TimestampTracker()
+    process.set_on_output_callback(_log_line_func(logger, timestamp_tracker))
+    process.set_on_terminate_callback(
+        _on_retry(serial, extra_params, timestamp_tracker)
+    )
+    return process
diff --git a/packages/antlion/controllers/android_lib/services.py b/packages/antlion/controllers/android_lib/services.py
new file mode 100644
index 0000000..098f524
--- /dev/null
+++ b/packages/antlion/controllers/android_lib/services.py
@@ -0,0 +1,118 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from antlion.controllers.android_lib import errors
+from antlion.controllers.android_lib import events as android_events
+from antlion.event import event_bus
+
+
+class AndroidService(object):
+    """The base class for Android long-running services.
+
+    The _start method is registered to an AndroidStartServicesEvent, and
+    the _stop method is registered to an AndroidStopServicesEvent.
+
+    Attributes:
+        ad: The AndroidDevice instance associated with the service.
+        serial: The serial of the device.
+        _registration_ids: List of registration IDs for the event subscriptions.
+    """
+
+    def __init__(self, ad):
+        self.ad = ad
+        self._registration_ids = []
+
+    @property
+    def serial(self):
+        return self.ad.serial
+
+    def register(self):
+        """Registers the _start and _stop methods to their corresponding
+        events.
+        """
+
+        def check_serial(event):
+            return self.serial == event.ad.serial
+
+        self._registration_ids = [
+            event_bus.register(
+                android_events.AndroidStartServicesEvent,
+                self._start,
+                filter_fn=check_serial,
+            ),
+            event_bus.register(
+                android_events.AndroidStopServicesEvent,
+                self._stop,
+                filter_fn=check_serial,
+            ),
+        ]
+
+    def unregister(self):
+        """Unregisters all subscriptions in this service."""
+        event_bus.unregister_all(from_list=self._registration_ids)
+        self._registration_ids.clear()
+
+    def _start(self, start_event):
+        """Start the service. Called upon an AndroidStartServicesEvent.
+
+        Args:
+            start_event: The AndroidStartServicesEvent instance.
+        """
+        raise NotImplementedError
+
+    def _stop(self, stop_event):
+        """Stop the service. Called upon an AndroidStopServicesEvent.
+
+        Args:
+            stop_event: The AndroidStopServicesEvent instance.
+        """
+        raise NotImplementedError
+
+
+class AdbLogcatService(AndroidService):
+    """Service for adb logcat."""
+
+    def _start(self, _):
+        self.ad.start_adb_logcat()
+
+    def _stop(self, _):
+        self.ad.stop_adb_logcat()
+
+
+class Sl4aService(AndroidService):
+    """Service for SL4A."""
+
+    def _start(self, start_event):
+        if self.ad.skip_sl4a:
+            return
+
+        if not self.ad.is_sl4a_installed():
+            self.ad.log.error("sl4a.apk is not installed")
+            raise errors.AndroidDeviceError(
+                "The required sl4a.apk is not installed", serial=self.serial
+            )
+        if not self.ad.ensure_screen_on():
+            self.ad.log.error("User window cannot come up")
+            raise errors.AndroidDeviceError(
+                "User window cannot come up", serial=self.serial
+            )
+
+        droid, ed = self.ad.get_droid()
+        ed.start()
+
+    def _stop(self, _):
+        self.ad.terminate_all_sessions()
+        self.ad._sl4a_manager.stop_service()
diff --git a/src/antlion/controllers/android_lib/tel/__init__.py b/packages/antlion/controllers/android_lib/tel/__init__.py
similarity index 100%
rename from src/antlion/controllers/android_lib/tel/__init__.py
rename to packages/antlion/controllers/android_lib/tel/__init__.py
diff --git a/packages/antlion/controllers/android_lib/tel/tel_utils.py b/packages/antlion/controllers/android_lib/tel/tel_utils.py
new file mode 100644
index 0000000..745189d
--- /dev/null
+++ b/packages/antlion/controllers/android_lib/tel/tel_utils.py
@@ -0,0 +1,690 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Generic telephony utility functions. Cloned from test_utils.tel."""
+
+import re
+import struct
+import time
+from queue import Empty
+
+from antlion.controllers.adb_lib.error import AdbCommandError
+from antlion.logger import epoch_to_log_line_timestamp
+
+INCALL_UI_DISPLAY_FOREGROUND = "foreground"
+INCALL_UI_DISPLAY_BACKGROUND = "background"
+INCALL_UI_DISPLAY_DEFAULT = "default"
+
+# Max time to wait after caller make a call and before
+# callee start ringing
+MAX_WAIT_TIME_ACCEPT_CALL_TO_OFFHOOK_EVENT = 30
+
+# Max time to wait after toggle airplane mode and before
+# get expected event
+MAX_WAIT_TIME_AIRPLANEMODE_EVENT = 90
+
+# Wait time between state check retry
+WAIT_TIME_BETWEEN_STATE_CHECK = 5
+
+# Constant for Data Roaming State
+DATA_ROAMING_ENABLE = 1
+DATA_ROAMING_DISABLE = 0
+
+# Constant for Telephony Manager Call State
+TELEPHONY_STATE_RINGING = "RINGING"
+TELEPHONY_STATE_IDLE = "IDLE"
+TELEPHONY_STATE_OFFHOOK = "OFFHOOK"
+TELEPHONY_STATE_UNKNOWN = "UNKNOWN"
+
+# Constant for Service State
+SERVICE_STATE_EMERGENCY_ONLY = "EMERGENCY_ONLY"
+SERVICE_STATE_IN_SERVICE = "IN_SERVICE"
+SERVICE_STATE_OUT_OF_SERVICE = "OUT_OF_SERVICE"
+SERVICE_STATE_POWER_OFF = "POWER_OFF"
+SERVICE_STATE_UNKNOWN = "UNKNOWN"
+
+# Constant for Network Mode
+NETWORK_MODE_GSM_ONLY = "NETWORK_MODE_GSM_ONLY"
+NETWORK_MODE_WCDMA_ONLY = "NETWORK_MODE_WCDMA_ONLY"
+NETWORK_MODE_LTE_ONLY = "NETWORK_MODE_LTE_ONLY"
+
+# Constant for Events
+EVENT_CALL_STATE_CHANGED = "CallStateChanged"
+EVENT_SERVICE_STATE_CHANGED = "ServiceStateChanged"
+
+
+class CallStateContainer:
+    INCOMING_NUMBER = "incomingNumber"
+    SUBSCRIPTION_ID = "subscriptionId"
+    CALL_STATE = "callState"
+
+
+class ServiceStateContainer:
+    VOICE_REG_STATE = "voiceRegState"
+    VOICE_NETWORK_TYPE = "voiceNetworkType"
+    DATA_REG_STATE = "dataRegState"
+    DATA_NETWORK_TYPE = "dataNetworkType"
+    OPERATOR_NAME = "operatorName"
+    OPERATOR_ID = "operatorId"
+    IS_MANUAL_NW_SELECTION = "isManualNwSelection"
+    ROAMING = "roaming"
+    IS_EMERGENCY_ONLY = "isEmergencyOnly"
+    NETWORK_ID = "networkId"
+    SYSTEM_ID = "systemId"
+    SUBSCRIPTION_ID = "subscriptionId"
+    SERVICE_STATE = "serviceState"
+
+
+def dumpsys_last_call_info(ad):
+    """Get call information by dumpsys telecom."""
+    num = dumpsys_last_call_number(ad)
+    output = ad.adb.shell("dumpsys telecom")
+    result = re.search(r"Call TC@%s: {(.*?)}" % num, output, re.DOTALL)
+    call_info = {"TC": num}
+    if result:
+        result = result.group(1)
+        for attr in (
+            "startTime",
+            "endTime",
+            "direction",
+            "isInterrupted",
+            "callTechnologies",
+            "callTerminationsReason",
+            "isVideoCall",
+            "callProperties",
+        ):
+            match = re.search(r"%s: (.*)" % attr, result)
+            if match:
+                if attr in ("startTime", "endTime"):
+                    call_info[attr] = epoch_to_log_line_timestamp(int(match.group(1)))
+                else:
+                    call_info[attr] = match.group(1)
+    ad.log.debug("call_info = %s", call_info)
+    return call_info
+
+
+def dumpsys_last_call_number(ad):
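+    """Returns the last telecom call number (TC@<n>) from dumpsys, or 0 if none."""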
+    output = ad.adb.shell("dumpsys telecom")
+    call_nums = re.findall("Call TC@(\d+):", output)
+    if not call_nums:
+        return 0
+    else:
+        return int(call_nums[-1])
+
+
+def get_device_epoch_time(ad):
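+    """Returns the device's current time as epoch milliseconds."""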
+    return int(1000 * float(ad.adb.shell("date +%s.%N")))
+
+
+def get_outgoing_voice_sub_id(ad):
+    """Get outgoing voice subscription id"""
+    if hasattr(ad, "outgoing_voice_sub_id"):
+        return ad.outgoing_voice_sub_id
+    else:
+        return ad.droid.subscriptionGetDefaultVoiceSubId()
+
+
+def get_rx_tx_power_levels(log, ad):
+    """Obtains Rx and Tx power levels from the MDS application.
+
+    The method requires the MDS app to be installed in the DUT.
+
+    Args:
+        log: logger object
+        ad: an android device
+
+    Returns:
+        A tuple where the first element is an array with the RSRP values for
+        each Rx chain, and the second element is the transmitted power in dBm.
+        Values for invalid Rx / Tx chains are set to None.
+    """
+    cmd = (
+        'am instrument -w -e request "80 00 e8 03 00 08 00 00 00" -e '
+        'response wait "com.google.mdstest/com.google.mdstest.instrument.'
+        'ModemCommandInstrumentation"'
+    )
+    try:
+        output = ad.adb.shell(cmd)
+    except AdbCommandError as e:
+        log.error(e)
+        output = None
+
+    if not output or "result=SUCCESS" not in output:
+        raise RuntimeError(
+            "Could not obtain Tx/Rx power levels from MDS. Is " "the MDS app installed?"
+        )
+
+    response = re.search(r"(?<=response=).+", output)
+
+    if not response:
+        raise RuntimeError(f"Invalid response from the MDS app:\n{output}")
+
+    # Obtain a list of bytes in hex format from the response string
+    response_hex = response.group(0).split(" ")
+
+    def get_bool(pos):
+        """Obtain a boolean variable from the byte array."""
+        return response_hex[pos] == "01"
+
+    def get_int32(pos):
+        """Obtain an int from the byte array. Bytes are printed in
+        little endian format."""
+        return struct.unpack(
+            "<i", bytearray.fromhex("".join(response_hex[pos : pos + 4]))
+        )[0]
+
+    rx_power = []
+    RX_CHAINS = 4
+
+    for i in range(RX_CHAINS):
+        # Calculate starting position for the Rx chain data structure
+        start = 12 + i * 22
+
+        # The first byte in the data structure indicates if the rx chain is
+        # valid.
+        if get_bool(start):
+            rx_power.append(get_int32(start + 2) / 10)
+        else:
+            rx_power.append(None)
+
+    # Calculate the position for the tx chain data structure
+    tx_pos = 12 + RX_CHAINS * 22
+
+    tx_valid = get_bool(tx_pos)
+    if tx_valid:
+        tx_power = get_int32(tx_pos + 2) / -10
+    else:
+        tx_power = None
+
+    return rx_power, tx_power
+
+
+def get_telephony_signal_strength(ad):
+    # {'evdoEcio': -1, 'asuLevel': 28, 'lteSignalStrength': 14, 'gsmLevel': 0,
+    # 'cdmaAsuLevel': 99, 'evdoDbm': -120, 'gsmDbm': -1, 'cdmaEcio': -160,
+    # 'level': 2, 'lteLevel': 2, 'cdmaDbm': -120, 'dbm': -112, 'cdmaLevel': 0,
+    # 'lteAsuLevel': 28, 'gsmAsuLevel': 99, 'gsmBitErrorRate': 0,
+    # 'lteDbm': -112, 'gsmSignalStrength': 99}
+    try:
+        signal_strength = ad.droid.telephonyGetSignalStrength()
+        if not signal_strength:
+            signal_strength = {}
+    except Exception as e:
+        ad.log.error(e)
+        signal_strength = {}
+    return signal_strength
+
+
+def initiate_call(
+    log,
+    ad,
+    callee_number,
+    emergency=False,
+    incall_ui_display=INCALL_UI_DISPLAY_FOREGROUND,
+    video=False,
+):
+    """Make phone call from caller to callee.
+
+    Args:
+        log: log object.
+        ad: Caller android device object.
+        callee_number: Callee phone number.
+        emergency: whether to place the call as an emergency call.
+            Optional. Default value is False.
+        incall_ui_display: show the dialer UI foreground or background
+        video: whether to initiate as video call
+
+    Returns:
+        result: if phone call is placed successfully.
+    """
+    ad.ed.clear_events(EVENT_CALL_STATE_CHANGED)
+    sub_id = get_outgoing_voice_sub_id(ad)
+    begin_time = get_device_epoch_time(ad)
+    ad.droid.telephonyStartTrackingCallStateForSubscription(sub_id)
+    try:
+        # Make a Call
+        ad.log.info("Make a phone call to %s", callee_number)
+        if emergency:
+            ad.droid.telecomCallEmergencyNumber(callee_number)
+        else:
+            ad.droid.telecomCallNumber(callee_number, video)
+
+        # Verify OFFHOOK state
+        if not wait_for_call_offhook_for_subscription(
+            log, ad, sub_id, event_tracking_started=True
+        ):
+            ad.log.info("sub_id %s not in call offhook state", sub_id)
+            last_call_drop_reason(ad, begin_time=begin_time)
+            return False
+        else:
+            return True
+    finally:
+        if hasattr(ad, "sdm_log") and getattr(ad, "sdm_log"):
+            ad.adb.shell("i2cset -fy 3 64 6 1 b", ignore_status=True)
+            ad.adb.shell("i2cset -fy 3 65 6 1 b", ignore_status=True)
+        ad.droid.telephonyStopTrackingCallStateChangeForSubscription(sub_id)
+        if incall_ui_display == INCALL_UI_DISPLAY_FOREGROUND:
+            ad.droid.telecomShowInCallScreen()
+        elif incall_ui_display == INCALL_UI_DISPLAY_BACKGROUND:
+            ad.droid.showHomeScreen()
+
+
+def is_event_match(event, field, value):
+    """Return if <field> in "event" match <value> or not.
+
+    Args:
+        event: event to test. This event need to have <field>.
+        field: field to match.
+        value: value to match.
+
+    Returns:
+        True if <field> in "event" match <value>.
+        False otherwise.
+    """
+    return is_event_match_for_list(event, field, [value])
+
+
+def is_event_match_for_list(event, field, value_list):
+    """Return if <field> in "event" match any one of the value
+        in "value_list" or not.
+
+    Args:
+        event: event to test. This event need to have <field>.
+        field: field to match.
+        value_list: a list of value to match.
+
+    Returns:
+        True if <field> in "event" match one of the value in "value_list".
+        False otherwise.
+    """
+    try:
+        value_in_event = event["data"][field]
+    except KeyError:
+        return False
+    for value in value_list:
+        if value_in_event == value:
+            return True
+    return False
+
+
+def is_phone_in_call(log, ad):
+    """Return True if phone in call.
+
+    Args:
+        log: log object.
+        ad:  android device.
+    """
+    try:
+        return ad.droid.telecomIsInCall()
+    except:
+        return "mCallState=2" in ad.adb.shell(
+            "dumpsys telephony.registry | grep mCallState"
+        )
+
+
+def last_call_drop_reason(ad, begin_time=None):
+    reasons = ad.search_logcat(
+        "qcril_qmi_voice_map_qmi_to_ril_last_call_failure_cause", begin_time
+    )
+    reason_string = ""
+    if reasons:
+        log_msg = "Logcat call drop reasons:"
+        for reason in reasons:
+            log_msg = f"{log_msg}\n\t{reason['log_message']}"
+            if "ril reason str" in reason["log_message"]:
+                reason_string = reason["log_message"].split(":")[-1].strip()
+        ad.log.info(log_msg)
+    reasons = ad.search_logcat("ACTION_FORBIDDEN_NO_SERVICE_AUTHORIZATION", begin_time)
+    if reasons:
+        ad.log.warning("ACTION_FORBIDDEN_NO_SERVICE_AUTHORIZATION is seen")
+    ad.log.info("last call dumpsys: %s", sorted(dumpsys_last_call_info(ad).items()))
+    return reason_string
+
+
+def toggle_airplane_mode(log, ad, new_state=None, strict_checking=True):
+    """Toggle the state of airplane mode.
+
+    Args:
+        log: log handler.
+        ad: android_device object.
+        new_state: Airplane mode state to set to.
+            If None, opposite of the current state.
+        strict_checking: Whether to turn on strict checking that checks all features.
+
+    Returns:
+        result: True if the operation succeeds. False if an error occurs.
+    """
+    if ad.skip_sl4a:
+        return toggle_airplane_mode_by_adb(log, ad, new_state)
+    else:
+        return toggle_airplane_mode_msim(
+            log, ad, new_state, strict_checking=strict_checking
+        )
+
+
+def toggle_airplane_mode_by_adb(log, ad, new_state=None):
+    """Toggle the state of airplane mode.
+
+    Args:
+        log: log handler.
+        ad: android_device object.
+        new_state: Airplane mode state to set to.
+            If None, opposite of the current state.
+
+    Returns:
+        result: True if the operation succeeds. False if an error occurs.
+    """
+    cur_state = bool(int(ad.adb.shell("settings get global airplane_mode_on")))
+    if new_state == cur_state:
+        ad.log.info("Airplane mode already in %s", new_state)
+        return True
+    elif new_state is None:
+        new_state = not cur_state
+    ad.log.info("Change airplane mode from %s to %s", cur_state, new_state)
+    try:
+        ad.adb.shell(f"settings put global airplane_mode_on {int(new_state)}")
+        ad.adb.shell("am broadcast -a android.intent.action.AIRPLANE_MODE")
+    except Exception as e:
+        ad.log.error(e)
+        return False
+    changed_state = bool(int(ad.adb.shell("settings get global airplane_mode_on")))
+    return changed_state == new_state
+
+
+def toggle_airplane_mode_msim(log, ad, new_state=None, strict_checking=True):
+    """Toggle the state of airplane mode.
+
+    Args:
+        log: log handler.
+        ad: android_device object.
+        new_state: Airplane mode state to set to.
+            If None, opposite of the current state.
+        strict_checking: Whether to turn on strict checking that checks all features.
+
+    Returns:
+        result: True if the operation succeeds. False if an error occurs.
+    """
+
+    cur_state = ad.droid.connectivityCheckAirplaneMode()
+    if cur_state == new_state:
+        ad.log.info("Airplane mode already in %s", new_state)
+        return True
+    elif new_state is None:
+        new_state = not cur_state
+        ad.log.info("Toggle APM mode, from current tate %s to %s", cur_state, new_state)
+    sub_id_list = []
+    active_sub_info = ad.droid.subscriptionGetAllSubInfoList()
+    if active_sub_info:
+        for info in active_sub_info:
+            sub_id_list.append(info["subscriptionId"])
+
+    ad.ed.clear_all_events()
+    time.sleep(0.1)
+    service_state_list = []
+    if new_state:
+        service_state_list.append(SERVICE_STATE_POWER_OFF)
+        ad.log.info("Turn on airplane mode")
+
+    else:
+        # If either one of these 3 events show up, it should be OK.
+        # Normal SIM, phone in service
+        service_state_list.append(SERVICE_STATE_IN_SERVICE)
+        # NO SIM, or Dead SIM, or no Roaming coverage.
+        service_state_list.append(SERVICE_STATE_OUT_OF_SERVICE)
+        service_state_list.append(SERVICE_STATE_EMERGENCY_ONLY)
+        ad.log.info("Turn off airplane mode")
+
+    for sub_id in sub_id_list:
+        ad.droid.telephonyStartTrackingServiceStateChangeForSubscription(sub_id)
+
+    timeout_time = time.time() + MAX_WAIT_TIME_AIRPLANEMODE_EVENT
+    ad.droid.connectivityToggleAirplaneMode(new_state)
+
+    try:
+        try:
+            event = ad.ed.wait_for_event(
+                EVENT_SERVICE_STATE_CHANGED,
+                is_event_match_for_list,
+                timeout=MAX_WAIT_TIME_AIRPLANEMODE_EVENT,
+                field=ServiceStateContainer.SERVICE_STATE,
+                value_list=service_state_list,
+            )
+            ad.log.info("Got event %s", event)
+        except Empty:
+            ad.log.warning(
+                "Did not get expected service state change to %s", service_state_list
+            )
+        finally:
+            for sub_id in sub_id_list:
+                ad.droid.telephonyStopTrackingServiceStateChangeForSubscription(sub_id)
+    except Exception as e:
+        ad.log.error(e)
+
+    # APM on (new_state=True) will turn off bluetooth but may not turn it on
+    try:
+        if new_state and not _wait_for_bluetooth_in_state(
+            log, ad, False, timeout_time - time.time()
+        ):
+            ad.log.error("Failed waiting for bluetooth during airplane mode toggle")
+            if strict_checking:
+                return False
+    except Exception as e:
+        ad.log.error("Failed to check bluetooth state due to %s", e)
+        if strict_checking:
+            raise
+
+    # APM on (new_state=True) will turn off wifi but may not turn it on
+    if new_state and not _wait_for_wifi_in_state(
+        log, ad, False, timeout_time - time.time()
+    ):
+        ad.log.error("Failed waiting for wifi during airplane mode toggle on")
+        if strict_checking:
+            return False
+
+    if ad.droid.connectivityCheckAirplaneMode() != new_state:
+        ad.log.error("Set airplane mode to %s failed", new_state)
+        return False
+    return True
+
+
+def toggle_cell_data_roaming(ad, state):
+    """Enable cell data roaming for default data subscription.
+
+    Wait for the data roaming status to be DATA_STATE_CONNECTED
+        or DATA_STATE_DISCONNECTED.
+
+    Args:
+        ad: Android Device Object.
+        state: True or False for enable or disable cell data roaming.
+
+    Returns:
+        True if success.
+        False if failed.
+    """
+    state_int = {True: DATA_ROAMING_ENABLE, False: DATA_ROAMING_DISABLE}[state]
+    action_str = {True: "Enable", False: "Disable"}[state]
+    if ad.droid.connectivityCheckDataRoamingMode() == state:
+        ad.log.info("Data roaming is already in state %s", state)
+        return True
+    if not ad.droid.connectivitySetDataRoaming(state_int):
+        ad.error.info("Fail to config data roaming into state %s", state)
+        return False
+    if ad.droid.connectivityCheckDataRoamingMode() == state:
+        ad.log.info("Data roaming is configured into state %s", state)
+        return True
+    else:
+        ad.log.error("Data roaming is not configured into state %s", state)
+        return False
+
+
+def wait_for_call_offhook_event(
+    log,
+    ad,
+    sub_id,
+    event_tracking_started=False,
+    timeout=MAX_WAIT_TIME_ACCEPT_CALL_TO_OFFHOOK_EVENT,
+):
+    """Wait for an incoming call on specified subscription.
+
+    Args:
+        log: log object.
+        ad: android device object.
+        event_tracking_started: True if event tracking was already started outside
+        timeout: time to wait for event
+
+    Returns:
+        True: if call offhook event is received.
+        False: if call offhook event is not received.
+    """
+    if not event_tracking_started:
+        ad.ed.clear_events(EVENT_CALL_STATE_CHANGED)
+        ad.droid.telephonyStartTrackingCallStateForSubscription(sub_id)
+    try:
+        ad.ed.wait_for_event(
+            EVENT_CALL_STATE_CHANGED,
+            is_event_match,
+            timeout=timeout,
+            field=CallStateContainer.CALL_STATE,
+            value=TELEPHONY_STATE_OFFHOOK,
+        )
+        ad.log.info("Got event %s", TELEPHONY_STATE_OFFHOOK)
+    except Empty:
+        ad.log.info("No event for call state change to OFFHOOK")
+        return False
+    finally:
+        if not event_tracking_started:
+            ad.droid.telephonyStopTrackingCallStateChangeForSubscription(sub_id)
+    return True
+
+
+def wait_for_call_offhook_for_subscription(
+    log,
+    ad,
+    sub_id,
+    event_tracking_started=False,
+    timeout=MAX_WAIT_TIME_ACCEPT_CALL_TO_OFFHOOK_EVENT,
+    interval=WAIT_TIME_BETWEEN_STATE_CHECK,
+):
+    """Wait for an incoming call on specified subscription.
+
+    Args:
+        log: log object.
+        ad: android device object.
+        sub_id: subscription ID
+        timeout: time to wait for ring
+        interval: checking interval
+
+    Returns:
+        True: if the call reaches OFFHOOK state within the timeout.
+        False: otherwise.
+    """
+    if not event_tracking_started:
+        ad.ed.clear_events(EVENT_CALL_STATE_CHANGED)
+        ad.droid.telephonyStartTrackingCallStateForSubscription(sub_id)
+    offhook_event_received = False
+    end_time = time.time() + timeout
+    try:
+        while time.time() < end_time:
+            if not offhook_event_received:
+                if wait_for_call_offhook_event(log, ad, sub_id, True, interval):
+                    offhook_event_received = True
+            telephony_state = ad.droid.telephonyGetCallStateForSubscription(sub_id)
+            telecom_state = ad.droid.telecomGetCallState()
+            if telephony_state == TELEPHONY_STATE_OFFHOOK and (
+                telecom_state == TELEPHONY_STATE_OFFHOOK
+            ):
+                ad.log.info("telephony and telecom are in OFFHOOK state")
+                return True
+            else:
+                ad.log.info(
+                    "telephony in %s, telecom in %s, expecting OFFHOOK state",
+                    telephony_state,
+                    telecom_state,
+                )
+            if offhook_event_received:
+                time.sleep(interval)
+    finally:
+        if not event_tracking_started:
+            ad.droid.telephonyStopTrackingCallStateChangeForSubscription(sub_id)
+
+
+def _wait_for_bluetooth_in_state(log, ad, state, max_wait):
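+    """Waits up to max_wait seconds for the Bluetooth adapter to reach `state`."""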
+    # FIXME: These event names should be defined in a common location
+    _BLUETOOTH_STATE_ON_EVENT = "BluetoothStateChangedOn"
+    _BLUETOOTH_STATE_OFF_EVENT = "BluetoothStateChangedOff"
+    ad.ed.clear_events(_BLUETOOTH_STATE_ON_EVENT)
+    ad.ed.clear_events(_BLUETOOTH_STATE_OFF_EVENT)
+
+    ad.droid.bluetoothStartListeningForAdapterStateChange()
+    try:
+        bt_state = ad.droid.bluetoothCheckState()
+        if bt_state == state:
+            return True
+        if max_wait <= 0:
+            ad.log.error(
+                "Time out: bluetooth state still %s, expecting %s", bt_state, state
+            )
+            return False
+
+        event = {False: _BLUETOOTH_STATE_OFF_EVENT, True: _BLUETOOTH_STATE_ON_EVENT}[
+            state
+        ]
+        event = ad.ed.pop_event(event, max_wait)
+        ad.log.info("Got event %s", event["name"])
+        return True
+    except Empty:
+        ad.log.error(
+            "Time out: bluetooth state still in %s, expecting %s", bt_state, state
+        )
+        return False
+    finally:
+        ad.droid.bluetoothStopListeningForAdapterStateChange()
+
+
+def wait_for_droid_in_call(log, ad, max_time):
+    """Wait for android to be in call state.
+
+    Args:
+        log: log object.
+        ad:  android device.
+        max_time: maximal wait time.
+
+    Returns:
+        If phone become in call state within max_time, return True.
+        Return False if timeout.
+    """
+    return _wait_for_droid_in_state(log, ad, max_time, is_phone_in_call)
+
+
+def _wait_for_droid_in_state(log, ad, max_time, state_check_func, *args, **kwargs):
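+    """Polls state_check_func every WAIT_TIME_BETWEEN_STATE_CHECK seconds until
+    it returns True or max_time seconds have elapsed."""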
+    while max_time >= 0:
+        if state_check_func(log, ad, *args, **kwargs):
+            return True
+
+        time.sleep(WAIT_TIME_BETWEEN_STATE_CHECK)
+        max_time -= WAIT_TIME_BETWEEN_STATE_CHECK
+
+    return False
+
+
+# TODO: replace this with an event-based function
+def _wait_for_wifi_in_state(log, ad, state, max_wait):
+    return _wait_for_droid_in_state(
+        log,
+        ad,
+        max_wait,
+        lambda log, ad, state: ad.droid.wifiCheckState() == state,
+        state,
+    )
diff --git a/src/antlion/controllers/ap_lib/__init__.py b/packages/antlion/controllers/ap_lib/__init__.py
similarity index 100%
rename from src/antlion/controllers/ap_lib/__init__.py
rename to packages/antlion/controllers/ap_lib/__init__.py
diff --git a/packages/antlion/controllers/ap_lib/ap_get_interface.py b/packages/antlion/controllers/ap_lib/ap_get_interface.py
new file mode 100644
index 0000000..7836644
--- /dev/null
+++ b/packages/antlion/controllers/ap_lib/ap_get_interface.py
@@ -0,0 +1,192 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+from typing import TYPE_CHECKING
+
+from antlion.libs.proc import job
+
+if TYPE_CHECKING:
+    from antlion.controllers.access_point import AccessPoint
+
+GET_ALL_INTERFACE = "ls /sys/class/net"
+GET_VIRTUAL_INTERFACE = "ls /sys/devices/virtual/net"
+BRCTL_SHOW = "brctl show"
+
+
+class ApInterfacesError(Exception):
+    """Error related to AP interfaces."""
+
+
+class ApInterfaces(object):
+    """Class to get network interface information for the device."""
+
+    def __init__(
+        self, ap: "AccessPoint", wan_interface_override: str | None = None
+    ) -> None:
+        """Initialize the ApInterface class.
+
+        Args:
+            ap: the ap object within ACTS
+            wan_interface_override: wan interface to use if specified by config
+        """
+        self.ssh = ap.ssh
+        self.wan_interface_override = wan_interface_override
+
+    def get_all_interface(self) -> list[str]:
+        """Get all network interfaces on the device.
+
+        Returns:
+            interfaces_all: list of all the network interfaces on device
+        """
+        output = self.ssh.run(GET_ALL_INTERFACE)
+        interfaces_all = output.stdout.split("\n")
+
+        return interfaces_all
+
+    def get_virtual_interface(self) -> list[str]:
+        """Get all virtual interfaces on the device.
+
+        Returns:
+            interfaces_virtual: list of all the virtual interfaces on device
+        """
+        output = self.ssh.run(GET_VIRTUAL_INTERFACE)
+        interfaces_virtual = output.stdout.split("\n")
+
+        return interfaces_virtual
+
+    def get_physical_interface(self) -> list[str]:
+        """Get all the physical interfaces of the device.
+
+        Get all physical interfaces such as eth ports and wlan ports
+
+        Returns:
+            interfaces_phy: list of all the physical interfaces
+        """
+        interfaces_all = self.get_all_interface()
+        interfaces_virtual = self.get_virtual_interface()
+        interfaces_phy = list(set(interfaces_all) - set(interfaces_virtual))
+
+        return interfaces_phy
+
+    def get_bridge_interface(self) -> list[str]:
+        """Get all the bridge interfaces of the device.
+
+        Returns:
+            interfaces_bridge: the list of bridge interfaces
+
+        Raises:
+            ApInterfacesError: failed to execute brctl on the device
+        """
+        try:
+            output = self.ssh.run(BRCTL_SHOW)
+        except job.Error as e:
+            raise ApInterfacesError(f'failed to execute "{BRCTL_SHOW}"') from e
+
+        lines = output.stdout.split("\n")
+        interfaces_bridge = []
+        for line in lines:
+            interfaces_bridge.append(line.split("\t")[0])
+        interfaces_bridge.pop(0)
+        return [x for x in interfaces_bridge if x != ""]
+
+    def get_wlan_interface(self) -> tuple[str, str]:
+        """Get all WLAN interfaces and specify 2.4 GHz and 5 GHz interfaces.
+
+        Returns:
+            (wlan_2g, wlan_5g): the 2.4 GHz and 5 GHz WLAN interfaces
+        Raises:
+            ApInterfacesError: Missing at least one WLAN interface
+        """
+        wlan_2g = None
+        wlan_5g = None
+        interfaces_phy = self.get_physical_interface()
+        for iface in interfaces_phy:
+            output = self.ssh.run(f"iwlist {iface} freq")
+            if "Channel 06" in output.stdout and "Channel 36" not in output.stdout:
+                wlan_2g = iface
+            elif "Channel 36" in output.stdout and "Channel 06" not in output.stdout:
+                wlan_5g = iface
+
+        if wlan_2g is None or wlan_5g is None:
+            raise ApInterfacesError("Missing at least one WLAN interface")
+
+        return (wlan_2g, wlan_5g)
+
+    def get_wan_interface(self) -> str:
+        """Get the WAN interface which has internet connectivity. If a wan
+        interface is already specified return that instead.
+
+        Returns:
+            wan: the WAN interface with internet connectivity
+        Raises:
+            ApInterfacesError: no running WAN can be found
+        """
+        if self.wan_interface_override:
+            return self.wan_interface_override
+
+        wan = None
+        interfaces_phy = self.get_physical_interface()
+        interfaces_wlan = self.get_wlan_interface()
+        interfaces_eth = list(set(interfaces_phy) - set(interfaces_wlan))
+        for iface in interfaces_eth:
+            network_status = self.check_ping(iface)
+            if network_status == 1:
+                wan = iface
+                break
+        if wan:
+            return wan
+
+        output = self.ssh.run("ifconfig")
+        interfaces_all = output.stdout.split("\n")
+        logging.info(f"IFCONFIG output = {interfaces_all}")
+
+        raise ApInterfacesError("No WAN interface available")
+
+    def get_lan_interface(self) -> str | None:
+        """Get the LAN interface connecting to local devices.
+
+        Returns:
+            lan: the first running LAN interface found on the device, or
+                None if nothing was found.
+        """
+        lan = None
+        interfaces_phy = self.get_physical_interface()
+        interfaces_wlan = self.get_wlan_interface()
+        interfaces_eth = list(set(interfaces_phy) - set(interfaces_wlan))
+        interface_wan = self.get_wan_interface()
+        interfaces_eth.remove(interface_wan)
+        for iface in interfaces_eth:
+            output = self.ssh.run(f"ifconfig {iface}")
+            if "RUNNING" in output.stdout:
+                lan = iface
+                break
+        return lan
+
+    def check_ping(self, iface: str) -> int:
+        """Check the ping status on specific interface to determine the WAN.
+
+        Args:
+            iface: the specific interface to check
+        Returns:
+            network_status: 1 if the interface can reach 8.8.8.8, 0 otherwise
+        """
+        try:
+            self.ssh.run(f"ping -c 3 -I {iface} 8.8.8.8")
+            return 1
+        except job.Error:
+            return 0
diff --git a/packages/antlion/controllers/ap_lib/ap_iwconfig.py b/packages/antlion/controllers/ap_lib/ap_iwconfig.py
new file mode 100644
index 0000000..01346b9
--- /dev/null
+++ b/packages/antlion/controllers/ap_lib/ap_iwconfig.py
@@ -0,0 +1,50 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from typing import TYPE_CHECKING
+
+from antlion.runner import CompletedProcess
+
+if TYPE_CHECKING:
+    from antlion.controllers.access_point import AccessPoint
+
+
+class ApIwconfigError(Exception):
+    """Error related to configuring the wireless interface via iwconfig."""
+
+
+class ApIwconfig(object):
+    """Class to configure wireless interface via iwconfig"""
+
+    PROGRAM_FILE = "/usr/local/sbin/iwconfig"
+
+    def __init__(self, ap: "AccessPoint") -> None:
+        """Initialize the ApIwconfig class.
+
+        Args:
+            ap: the ap object within ACTS
+        """
+        self.ssh = ap.ssh
+
+    def ap_iwconfig(
+        self, interface: str, arguments: str | None = None
+    ) -> CompletedProcess:
+        """Configure the wireless interface using iwconfig.
+
+        Returns:
+            output: the output of the command, if any
+        """
+        return self.ssh.run(f"{self.PROGRAM_FILE} {interface} {arguments}")
diff --git a/packages/antlion/controllers/ap_lib/bridge_interface.py b/packages/antlion/controllers/ap_lib/bridge_interface.py
new file mode 100644
index 0000000..b060267
--- /dev/null
+++ b/packages/antlion/controllers/ap_lib/bridge_interface.py
@@ -0,0 +1,116 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+import time
+
+from antlion.libs.proc import job
+
+_BRCTL = "brctl"
+BRIDGE_NAME = "br-lan"
+CREATE_BRIDGE = f"{_BRCTL} addbr {BRIDGE_NAME}"
+DELETE_BRIDGE = f"{_BRCTL} delbr {BRIDGE_NAME}"
+BRING_DOWN_BRIDGE = f"ifconfig {BRIDGE_NAME} down"
+
+
+class BridgeInterfaceConfigs(object):
+    """Configs needed for creating bridge interface between LAN and WLAN."""
+
+    def __init__(self, iface_wlan, iface_lan, bridge_ip):
+        """Set bridge interface configs based on the channel info.
+
+        Args:
+            iface_wlan: the wlan interface as part of the bridge
+            iface_lan: the ethernet LAN interface as part of the bridge
+            bridge_ip: the ip address assigned to the bridge interface
+        """
+        self.iface_wlan = iface_wlan
+        self.iface_lan = iface_lan
+        self.bridge_ip = bridge_ip
+
+
+class BridgeInterface(object):
+    """Class object for bridge interface betwen WLAN and LAN"""
+
+    def __init__(self, ap):
+        """Initialize the BridgeInterface class.
+
+        Bridge interface will be added between ethernet LAN port and WLAN port.
+        Args:
+            ap: AP object within ACTS
+        """
+        self.ssh = ap.ssh
+
+    def startup(self, brconfigs):
+        """Start up the bridge interface.
+
+        Args:
+            brconfigs: the bridge interface config, type BridgeInterfaceConfigs
+        """
+
+        logging.info("Create bridge interface between LAN and WLAN")
+        # Create the bridge
+        try:
+            self.ssh.run(CREATE_BRIDGE)
+        except job.Error:
+            logging.warning(
+                f"Bridge interface {BRIDGE_NAME} already exists, no action needed"
+            )
+
+        # Turn 4addr mode on for the wlan interface
+        ENABLE_4ADDR = f"iw dev {brconfigs.iface_wlan} set 4addr on"
+        try:
+            self.ssh.run(ENABLE_4ADDR)
+        except job.Error:
+            logging.warning(f"4addr is already enabled on {brconfigs.iface_wlan}")
+
+        # Add both LAN and WLAN interfaces to the bridge interface
+        for interface in [brconfigs.iface_lan, brconfigs.iface_wlan]:
+            ADD_INTERFACE = f"{_BRCTL} addif {BRIDGE_NAME} {interface}"
+            try:
+                self.ssh.run(ADD_INTERFACE)
+            except job.Error:
+                logging.warning(f"{interface} has already been added to {BRIDGE_NAME}")
+        time.sleep(5)
+
+        # Set IP address on the bridge interface to bring it up
+        SET_BRIDGE_IP = f"ifconfig {BRIDGE_NAME} {brconfigs.bridge_ip}"
+        self.ssh.run(SET_BRIDGE_IP)
+        time.sleep(2)
+
+        # Bridge interface is up
+        logging.info("Bridge interface is up and running")
+
+    def teardown(self, brconfigs):
+        """Tear down the bridge interface.
+
+        Args:
+            brconfigs: the bridge interface config, type BridgeInterfaceConfigs
+        """
+        logging.info("Bringing down the bridge interface")
+        # Delete the bridge interface
+        self.ssh.run(BRING_DOWN_BRIDGE)
+        time.sleep(1)
+        self.ssh.run(DELETE_BRIDGE)
+
+        # Bring down wlan interface and disable 4addr mode
+        BRING_DOWN_WLAN = f"ifconfig {brconfigs.iface_wlan} down"
+        self.ssh.run(BRING_DOWN_WLAN)
+        time.sleep(2)
+        DISABLE_4ADDR = f"iw dev {brconfigs.iface_wlan} set 4addr off"
+        self.ssh.run(DISABLE_4ADDR)
+        time.sleep(1)
+        logging.info("Bridge interface is down")
diff --git a/packages/antlion/controllers/ap_lib/dhcp_config.py b/packages/antlion/controllers/ap_lib/dhcp_config.py
new file mode 100644
index 0000000..5fa8cf0
--- /dev/null
+++ b/packages/antlion/controllers/ap_lib/dhcp_config.py
@@ -0,0 +1,205 @@
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import copy
+from ipaddress import IPv4Address, IPv4Network
+
+_ROUTER_DNS = "8.8.8.8, 4.4.4.4"
+
+
+class Subnet(object):
+    """Configs for a subnet  on the dhcp server.
+
+    Attributes:
+        network: ipaddress.IPv4Network, the network that this subnet is in.
+        start: ipaddress.IPv4Address, the start ip address.
+        end: ipaddress.IPv4Address, the end ip address.
+        router: The router to give to all hosts in this subnet.
+        lease_time: The lease time of all hosts in this subnet.
+        additional_parameters: A dictionary corresponding to DHCP parameters.
+        additional_options: A dictionary corresponding to DHCP options.
+    """
+
+    def __init__(
+        self,
+        subnet: IPv4Network,
+        start: IPv4Address | None = None,
+        end: IPv4Address | None = None,
+        router: IPv4Address | None = None,
+        lease_time: int | None = None,
+        additional_parameters: dict[str, str] | None = None,
+        additional_options: dict[str, int | str] | None = None,
+    ):
+        """
+        Args:
+            subnet: ipaddress.IPv4Network, The address space of the subnetwork
+                    served by the DHCP server.
+            start: ipaddress.IPv4Address, The start of the address range to
+                   give hosts in this subnet. If not given, the second ip in
+                   the network is used, under the assumption that the first
+                   address is the router.
+            end: ipaddress.IPv4Address, The end of the address range to give
+                 hosts. If not given then the address prior to the broadcast
+                 address (i.e. the second to last ip in the network) is used.
+            router: ipaddress.IPv4Address, The router hosts should use in this
+                    subnet. If not given the first ip in the network is used.
+            lease_time: int, The amount of lease time in seconds
+                        hosts in this subnet have.
+            additional_parameters: A dictionary corresponding to DHCP parameters.
+            additional_options: A dictionary corresponding to DHCP options.
+        """
+        self.network = subnet
+
+        if start:
+            self.start = start
+        else:
+            self.start = self.network[2]
+
+        if self.start not in self.network:
+            raise ValueError("The start range is not in the subnet.")
+        if self.start.is_reserved:
+            raise ValueError("The start of the range cannot be reserved.")
+
+        if end:
+            self.end = end
+        else:
+            self.end = self.network[-2]
+
+        if self.end not in self.network:
+            raise ValueError("The end range is not in the subnet.")
+        if self.end.is_reserved:
+            raise ValueError("The end of the range cannot be reserved.")
+        if self.end < self.start:
+            raise ValueError("The end must be an address larger than the start.")
+
+        if router:
+            if router >= self.start and router <= self.end:
+                raise ValueError("Router must not be in pool range.")
+            if router not in self.network:
+                raise ValueError("Router must be in the given subnet.")
+
+            self.router = router
+        else:
+            # TODO: Use some more clever logic so that we don't have to search
+            # every host potentially.
+            # This is especially important if we support IPv6 networks in this
+            # configuration. The improved logic that we can use is:
+            #    a) erroring out if start and end encompass the whole network, and
+            #    b) picking any address before self.start or after self.end.
+            for host in self.network.hosts():
+                if host < self.start or host > self.end:
+                    self.router = host
+                    break
+
+            if not hasattr(self, "router"):
+                raise ValueError("No useable host found.")
+
+        self.lease_time = lease_time
+        self.additional_parameters = (
+            additional_parameters if additional_parameters is not None else {}
+        )
+        self.additional_options = (
+            additional_options if additional_options is not None else {}
+        )
+        if "domain-name-servers" not in self.additional_options:
+            self.additional_options["domain-name-servers"] = _ROUTER_DNS
+
+
+class StaticMapping(object):
+    """Represents a static dhcp host.
+
+    Attributes:
+        identifier: The id of the host (usually the mac address,
+                    e.g. 00:11:22:33:44:55).
+        address: ipaddress.IPv4Address, The ipv4 address to give the host.
+        lease_time: How long to give a lease to this host.
+    """
+
+    def __init__(self, identifier, address, lease_time=None):
+        self.identifier = identifier
+        self.ipv4_address = address
+        self.lease_time = lease_time
+
+
+class DhcpConfig(object):
+    """The configs for a dhcp server.
+
+    Attributes:
+        subnets: A list of all subnets for the dhcp server to create.
+        static_mappings: A list of static host addresses.
+        default_lease_time: The default time for a lease.
+        max_lease_time: The max time to allow a lease.
+    """
+
+    def __init__(
+        self,
+        subnets=None,
+        static_mappings=None,
+        default_lease_time=600,
+        max_lease_time=7200,
+    ):
+        self.subnets = copy.deepcopy(subnets) if subnets else []
+        self.static_mappings = copy.deepcopy(static_mappings) if static_mappings else []
+        self.default_lease_time = default_lease_time
+        self.max_lease_time = max_lease_time
+
+    def render_config_file(self):
+        """Renders the config parameters into a format compatible with
+        the ISC DHCP server (dhcpd).
+        """
+        lines = []
+
+        if self.default_lease_time:
+            lines.append(f"default-lease-time {self.default_lease_time};")
+        if self.max_lease_time:
+            lines.append(f"max-lease-time {self.max_lease_time};")
+
+        for subnet in self.subnets:
+            address = subnet.network.network_address
+            mask = subnet.network.netmask
+            router = subnet.router
+            start = subnet.start
+            end = subnet.end
+            lease_time = subnet.lease_time
+            additional_parameters = subnet.additional_parameters
+            additional_options = subnet.additional_options
+
+            lines.append("subnet %s netmask %s {" % (address, mask))
+            lines.append("\tpool {")
+            lines.append(f"\t\toption subnet-mask {mask};")
+            lines.append(f"\t\toption routers {router};")
+            lines.append(f"\t\trange {start} {end};")
+            if lease_time:
+                lines.append(f"\t\tdefault-lease-time {lease_time};")
+                lines.append(f"\t\tmax-lease-time {lease_time};")
+            for param, value in additional_parameters.items():
+                lines.append(f"\t\t{param} {value};")
+            for option, value in additional_options.items():
+                lines.append(f"\t\toption {option} {value};")
+            lines.append("\t}")
+            lines.append("}")
+
+        for mapping in self.static_mappings:
+            identifier = mapping.identifier
+            fixed_address = mapping.ipv4_address
+            host_fake_name = f"host{identifier.replace(':', '')}"
+            lease_time = mapping.lease_time
+
+            lines.append("host %s {" % host_fake_name)
+            lines.append(f"\thardware ethernet {identifier};")
+            lines.append(f"\tfixed-address {fixed_address};")
+            if lease_time:
+                lines.append(f"\tdefault-lease-time {lease_time};")
+                lines.append(f"\tmax-lease-time {lease_time};")
+            lines.append("}")
+
+        config_str = "\n".join(lines)
+
+        return config_str
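+
+
+# Illustrative usage, not part of the antlion API (addresses below are
+# assumptions for the example):
+#
+#     subnet = Subnet(IPv4Network("192.168.9.0/24"))
+#     mapping = StaticMapping("00:11:22:33:44:55", IPv4Address("192.168.9.10"))
+#     config = DhcpConfig(subnets=[subnet], static_mappings=[mapping])
+#     print(config.render_config_file())
+#
+# With the defaults above, the rendered dhcpd.conf declares a pool from
+# 192.168.9.2 to 192.168.9.254, picks 192.168.9.1 as the router, and pins
+# 00:11:22:33:44:55 to the fixed address 192.168.9.10.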
diff --git a/packages/antlion/controllers/ap_lib/dhcp_server.py b/packages/antlion/controllers/ap_lib/dhcp_server.py
new file mode 100644
index 0000000..d123acf
--- /dev/null
+++ b/packages/antlion/controllers/ap_lib/dhcp_server.py
@@ -0,0 +1,202 @@
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import time
+
+from tenacity import retry, retry_if_exception_type, stop_after_attempt, wait_fixed
+
+from antlion import logger
+from antlion.controllers.ap_lib.dhcp_config import DhcpConfig
+from antlion.controllers.utils_lib.commands import shell
+
+
+class Error(Exception):
+    """An error caused by the dhcp server."""
+
+
+class NoInterfaceError(Exception):
+    """Error thrown when the dhcp server has no interfaces on any subnet."""
+
+
+class DhcpServer(object):
+    """Manages the dhcp server program.
+
+    Only one of these can run in an environment at a time.
+
+    Attributes:
+        config: The dhcp server configuration that is being used.
+    """
+
+    PROGRAM_FILE = "dhcpd"
+
+    def __init__(self, runner, interface, working_dir="/tmp"):
+        """
+        Args:
+            runner: Object that has run_async and run methods for running
+                    shell commands.
+            interface: string, The name of the interface to use.
+            working_dir: The directory to work out of.
+        """
+        self._log = logger.create_logger(lambda msg: f"[DHCP Server|{interface}] {msg}")
+
+        self._runner = runner
+        self._working_dir = working_dir
+        self._shell = shell.ShellCommand(runner, working_dir)
+        self._stdio_log_file = f"dhcpd_{interface}.log"
+        self._config_file = f"dhcpd_{interface}.conf"
+        self._lease_file = f"dhcpd_{interface}.leases"
+        self._pid_file = f"dhcpd_{interface}.pid"
+        self._identifier = f"{self.PROGRAM_FILE}.*{self._config_file}"
+
+    # There is a slight timing issue: if the proc filesystem in Linux has not
+    # been updated by the time this is called, a NoInterfaceError is raised.
+    # Adding this retry makes the error go away, though a warning is still
+    # logged if the problem occurs.  The error seems to happen more often with
+    # bridge interfaces than with standard interfaces.
+    @retry(
+        retry=retry_if_exception_type(NoInterfaceError),
+        stop=stop_after_attempt(3),
+        wait=wait_fixed(1),
+    )
+    def start(self, config: DhcpConfig, timeout_sec: int = 60) -> None:
+        """Starts the dhcp server.
+
+        Starts the dhcp server daemon and runs it in the background.
+
+        Args:
+            config: Configs to start the dhcp server with.
+            timeout_sec: Time to wait for the DHCP server to come up.
+
+        Raises:
+            Error: Raised when a dhcp server error is found.
+        """
+        if self.is_alive():
+            self.stop()
+
+        self._write_configs(config)
+        self._shell.delete_file(self._stdio_log_file)
+        self._shell.delete_file(self._pid_file)
+        self._shell.touch_file(self._lease_file)
+
+        dhcpd_command = (
+            f"{self.PROGRAM_FILE} "
+            f'-cf "{self._config_file}" '
+            f"-lf {self._lease_file} "
+            f'-pf "{self._pid_file}" '
+            "-f -d"
+        )
+
+        base_command = f'cd "{self._working_dir}"; {dhcpd_command}'
+        job_str = f'{base_command} > "{self._stdio_log_file}" 2>&1'
+        self._runner.run_async(job_str)
+
+        try:
+            self._wait_for_process(timeout=timeout_sec)
+            self._wait_for_server(timeout=timeout_sec)
+        except:
+            self._log.warning("Failed to start DHCP server.")
+            self._log.info(f"DHCP configuration:\n{config.render_config_file()}\n")
+            self._log.info(f"DHCP logs:\n{self.get_logs()}\n")
+            self.stop()
+            raise
+
+    def stop(self):
+        """Kills the daemon if it is running."""
+        if self.is_alive():
+            self._shell.kill(self._identifier)
+
+    def is_alive(self):
+        """
+        Returns:
+            True if the daemon is running.
+        """
+        return self._shell.is_alive(self._identifier)
+
+    def get_logs(self) -> str:
+        """Pulls the log files from where dhcp server is running.
+
+        Returns:
+            A string of the dhcp server logs.
+        """
+        return self._shell.read_file(self._stdio_log_file)
+
+    def _wait_for_process(self, timeout=60):
+        """Waits for the process to come up.
+
+        Waits until the dhcp server process is found running, or there is
+        a timeout. If the program never comes up then the log file
+        will be scanned for errors.
+
+        Raises: See _scan_for_errors
+        """
+        start_time = time.time()
+        while time.time() - start_time < timeout and not self.is_alive():
+            self._scan_for_errors(False)
+            time.sleep(0.1)
+
+        self._scan_for_errors(True)
+
+    def _wait_for_server(self, timeout=60):
+        """Waits for dhcp server to report that the server is up.
+
+        Waits until dhcp server says the server has been brought up or an
+        error occurs.
+
+        Raises: see _scan_for_errors
+        """
+        start_time = time.time()
+        while time.time() - start_time < timeout:
+            success = self._shell.search_file(
+                "Wrote [0-9]* leases to leases file", self._stdio_log_file
+            )
+            if success:
+                return
+
+            self._scan_for_errors(True)
+
+    def _scan_for_errors(self, should_be_up):
+        """Scans the dhcp server log for any errors.
+
+        Args:
+            should_be_up: If true then dhcp server is expected to be alive.
+                          If it is found not alive while this is true an error
+                          is thrown.
+
+        Raises:
+            Error: Raised when a dhcp server error is found.
+        """
+        # If this is checked last we can run into a race condition where while
+        # scanning the log the process has not died, but after scanning it
+        # has. If this were checked last in that condition then the wrong
+        # error will be thrown. To prevent this we gather the alive state first
+        # so that if it is dead it will definitely give the right error before
+        # just giving a generic one.
+        is_dead = not self.is_alive()
+
+        no_interface = self._shell.search_file(
+            "Not configured to listen on any interfaces", self._stdio_log_file
+        )
+        if no_interface:
+            raise NoInterfaceError(
+                "Dhcp does not contain a subnet for any of the networks the"
+                " current interfaces are on."
+            )
+
+        if should_be_up and is_dead:
+            raise Error("Dhcp server failed to start.", self)
+
+    def _write_configs(self, config):
+        """Writes the configs to the dhcp server config file."""
+        self._shell.delete_file(self._config_file)
+        config_str = config.render_config_file()
+        self._shell.write_file(self._config_file, config_str)
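+
+
+# Illustrative usage, not part of the antlion API (a minimal sketch; `runner`
+# is assumed to be an object with run and run_async methods, such as an ssh
+# connection, and `config` an already-built DhcpConfig):
+#
+#     server = DhcpServer(runner, interface="eth1")
+#     server.start(config)
+#     ...  # clients obtain leases
+#     server.stop()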
diff --git a/packages/antlion/controllers/ap_lib/extended_capabilities.py b/packages/antlion/controllers/ap_lib/extended_capabilities.py
new file mode 100644
index 0000000..4570409
--- /dev/null
+++ b/packages/antlion/controllers/ap_lib/extended_capabilities.py
@@ -0,0 +1,193 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from enum import IntEnum, unique
+
+
+@unique
+class ExtendedCapability(IntEnum):
+    """All extended capabilities present in IEEE 802.11-2020 Table 9-153.
+
+    Each name has a value corresponding to that extended capability's bit offset
+    in the specification's extended capabilities field.
+
+    Note that most extended capabilities are represented by a single bit, which
+    indicates whether the extended capability is advertised by the STA; but
+    some are represented by multiple bits. In the enum, each extended capability
+    has the value of its offset; comments indicate capabilities that use
+    multiple bits.
+    """
+
+    TWENTY_FORTY_BSS_COEXISTENCE_MANAGEMENT_SUPPORT = 0
+    GLK = 1
+    EXTENDED_CHANNEL_SWITCHING = 2
+    GLK_GCR = 3
+    PSMP_CAPABILITY = 4
+    # 5 reserved
+    S_PSMP_SUPPORT = 6
+    EVENT = 7
+    DIAGNOSTICS = 8
+    MULTICAST_DIAGNOSTICS = 9
+    LOCATION_TRACKING = 10
+    FMS = 11
+    PROXY_ARP_SERVICE = 12
+    COLLOCATED_INTERFERENCE_REPORTING = 13
+    CIVIC_LOCATION = 14
+    GEOSPATIAL_LOCATION = 15
+    TFS = 16
+    WNM_SLEEP_MODE = 17
+    TIM_BROADCAST = 18
+    BSS_TRANSITION = 19
+    QOS_TRAFFIC_CAPABILITY = 20
+    AC_STATION_COUNT = 21
+    MULTIPLE_BSSID = 22
+    TIMING_MEASUREMENT = 23
+    CHANNEL_USAGE = 24
+    SSID_LIST = 25
+    DMS = 26
+    UTC_TSF_OFFSET = 27
+    TPU_BUFFER_STA_SUPPORT = 28
+    TDLS_PEER_PSM_SUPPORT = 29
+    TDLS_CHANNEL_SWITCHING = 30
+    INTERWORKING = 31
+    QOS_MAP = 32
+    EBR = 33
+    SSPN_INTERFACE = 34
+    # 35 reserved
+    MSGCF_CAPABILITY = 36
+    TDLS_SUPPORT = 37
+    TDLS_PROHIBITED = 38
+    TDLS_CHANNEL_SWITCHING_PROHIBITED = 39
+    REJECT_UNADMITTED_FRAME = 40
+    SERVICE_INTERVAL_GRANULARITY = 41
+    # Bits 41-43 contain SERVICE_INTERVAL_GRANULARITY value
+    IDENTIFIER_LOCATION = 44
+    U_APSD_COEXISTENCE = 45
+    WNM_NOTIFICATION = 46
+    QAB_CAPABILITY = 47
+    UTF_8_SSID = 48
+    QMF_ACTIVATED = 49
+    QMF_RECONFIGURATION_ACTIVATED = 50
+    ROBUST_AV_STREAMING = 51
+    ADVANCED_GCR = 52
+    MESH_GCR = 53
+    SCS = 54
+    QLOAD_REPORT = 55
+    ALTERNATE_EDCA = 56
+    UNPROTECTED_TXOP_NEGOTIATION = 57
+    PROTECTED_TXOP_NEGOTIATION = 58
+    # 59 reserved
+    PROTECTED_QLOAD_REPORT = 60
+    TDLS_WIDER_BANDWIDTH = 61
+    OPERATING_MODE_NOTIFICATION = 62
+    MAX_NUMBER_OF_MSDUS_IN_A_MSDU = 63
+    # 63-64 contain MAX_NUMBER_OF_MSDUS_IN_A_MSDU value
+    CHANNEL_SCHEDULE_MANAGEMENT = 65
+    GEODATABASE_INBAND_ENABLING_SIGNAL = 66
+    NETWORK_CHANNEL_CONTROL = 67
+    WHITE_SPACE_MAP = 68
+    CHANNEL_AVAILABILITY_QUERY = 69
+    FINE_TIMING_MEASUREMENT_RESPONDER = 70
+    FINE_TIMING_MEASUREMENT_INITIATOR = 71
+    FILS_CAPABILITY = 72
+    EXTENDED_SPECTRUM_MANAGEMENT_CAPABLE = 73
+    FUTURE_CHANNEL_GUIDANCE = 74
+    PAD = 75
+    # 76-79 reserved
+    COMPLETE_LIST_OF_NON_TX_BSSID_PROFILES = 80
+    SAE_PASSWORD_IDENTIFIERS_IN_USE = 81
+    SAE_PASSWORD_IDENTIFIERS_USED_EXCLUSIVELY = 82
+    # 83 reserved
+    BEACON_PROTECTION_ENABLED = 84
+    MIRRORED_SCS = 85
+    # 86 reserved
+    LOCAL_MAC_ADDRESS_POLICY = 87
+    # 88-n reserved
+
+
+def _offsets(ext_cap_offset: ExtendedCapability) -> tuple[int, int]:
+    """For given capability, return the byte and bit offsets within the field.
+
+    802.11 divides the extended capability field into bytes, as does the
+    ExtendedCapabilities class below. This function returns the index of the
+    byte that contains the given extended capability, as well as the bit offset
+    inside that byte (all offsets zero-indexed). For example,
+    MULTICAST_DIAGNOSTICS is bit 9, which is within byte 1 at bit offset 1.
+    """
+    byte_offset = ext_cap_offset // 8
+    bit_offset = ext_cap_offset % 8
+    return byte_offset, bit_offset
+
+
+class ExtendedCapabilities:
+    """Extended capability parsing and representation.
+
+    See IEEE 802.11-2020 9.4.2.26.
+    """
+
+    def __init__(self, ext_cap: bytearray = bytearray()):
+        """Represent the given extended capabilities field.
+
+        Args:
+            ext_cap: IEEE 802.11-2020 9.4.2.26 extended capabilities field.
+            Default is an empty field, meaning no extended capabilities are
+            advertised.
+        """
+        self._ext_cap = ext_cap
+
+    def _capability_advertised(self, ext_cap: ExtendedCapability) -> bool:
+        """Whether an extended capability is advertised.
+
+        Args:
+            ext_cap: an extended capability.
+        Returns:
+            True if the bit is present and its value is 1, otherwise False.
+        Raises:
+            NotImplementedError: for extended capabilities that span more than
+            a single bit. These could be supported, but no callers need them
+            at this time.
+        """
+        if ext_cap in [
+            ExtendedCapability.SERVICE_INTERVAL_GRANULARITY,
+            ExtendedCapability.MAX_NUMBER_OF_MSDUS_IN_A_MSDU,
+        ]:
+            raise NotImplementedError(
+                f"{ext_cap.name} not implemented yet by {self.__class__}"
+            )
+        byte_offset, bit_offset = _offsets(ext_cap)
+        if len(self._ext_cap) > byte_offset:
+            # Use bit_offset to derive a mask that will check the correct bit.
+            if self._ext_cap[byte_offset] & 2**bit_offset > 0:
+                return True
+        return False
+
+    @property
+    def bss_transition(self) -> bool:
+        return self._capability_advertised(ExtendedCapability.BSS_TRANSITION)
+
+    @property
+    def proxy_arp_service(self) -> bool:
+        return self._capability_advertised(ExtendedCapability.PROXY_ARP_SERVICE)
+
+    @property
+    def utc_tsf_offset(self) -> bool:
+        return self._capability_advertised(ExtendedCapability.UTC_TSF_OFFSET)
+
+    @property
+    def wnm_sleep_mode(self) -> bool:
+        return self._capability_advertised(ExtendedCapability.WNM_SLEEP_MODE)
+
+    # Other extended capability property methods can be added as needed by callers.
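+
+
+# Illustrative usage, not part of the antlion API (the hex string below is an
+# assumption for the example):
+#
+#     ext_cap = ExtendedCapabilities(bytearray.fromhex("040008"))
+#     assert ext_cap.bss_transition        # bit 19 -> byte 2, bit 3 (0x08 set)
+#     assert not ext_cap.wnm_sleep_mode    # bit 17 -> byte 2, bit 1 (clear)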
diff --git a/packages/antlion/controllers/ap_lib/hostapd.py b/packages/antlion/controllers/ap_lib/hostapd.py
new file mode 100644
index 0000000..dba7eeb
--- /dev/null
+++ b/packages/antlion/controllers/ap_lib/hostapd.py
@@ -0,0 +1,425 @@
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import collections
+import itertools
+import logging
+import re
+import time
+from subprocess import CalledProcessError
+from typing import Any, Iterable
+
+from antlion.controllers.ap_lib import hostapd_constants
+from antlion.controllers.ap_lib.extended_capabilities import ExtendedCapabilities
+from antlion.controllers.ap_lib.hostapd_config import HostapdConfig
+from antlion.controllers.ap_lib.wireless_network_management import (
+    BssTransitionManagementRequest,
+)
+from antlion.controllers.utils_lib.commands import shell
+from antlion.runner import Runner
+
+PROGRAM_FILE = "/usr/sbin/hostapd"
+CLI_PROGRAM_FILE = "/usr/bin/hostapd_cli"
+
+
+class Error(Exception):
+    """An error caused by hostapd."""
+
+
+class Hostapd(object):
+    """Manages the hostapd program.
+
+    Attributes:
+        config: The hostapd configuration that is being used.
+    """
+
+    def __init__(
+        self, runner: Runner, interface: str, working_dir: str = "/tmp"
+    ) -> None:
+        """
+        Args:
+            runner: Object that has run_async and run methods for executing
+                    shell commands (e.g. connection.SshConnection)
+            interface: The name of the interface to use (e.g. wlan0).
+            working_dir: The directory to work out of.
+        """
+        self._runner = runner
+        self._interface = interface
+        self._working_dir = working_dir
+        self.config: HostapdConfig | None = None
+        self._shell = shell.ShellCommand(runner, working_dir)
+        self._log_file = f"hostapd-{self._interface}.log"
+        self._ctrl_file = f"hostapd-{self._interface}.ctrl"
+        self._config_file = f"hostapd-{self._interface}.conf"
+        self._identifier = f"{PROGRAM_FILE}.*{self._config_file}"
+
+    def start(
+        self,
+        config: HostapdConfig,
+        timeout: int = 60,
+        additional_parameters: dict[str, Any] | None = None,
+    ) -> None:
+        """Starts hostapd
+
+        Starts the hostapd daemon and runs it in the background.
+
+        Args:
+            config: Configs to start the hostapd with.
+            timeout: Time to wait for hostapd to come up.
+            additional_parameters: A dictionary of parameters that can be sent
+                                   directly into the hostapd config file.  This
+                                   can be used for debugging and/or adding
+                                   one-off parameters into the config.
+
+        Note:
+            The daemon can start and still not work. Invalid configurations can
+            take a long amount of time to surface, and because the daemon runs
+            indefinitely it's impossible to wait on. If you need to check that
+            the configs are ok, use periodic checks of is_alive() and the logs.
+        """
+        if additional_parameters is None:
+            additional_parameters = {}
+
+        if self.is_alive():
+            self.stop()
+
+        self.config = config
+
+        self._shell.delete_file(self._ctrl_file)
+        self._shell.delete_file(self._log_file)
+        self._shell.delete_file(self._config_file)
+        self._write_configs(additional_parameters)
+
+        hostapd_command = f'{PROGRAM_FILE} -dd -t "{self._config_file}"'
+        base_command = f'cd "{self._working_dir}"; {hostapd_command}'
+        job_str = f'rfkill unblock all; {base_command} > "{self._log_file}" 2>&1'
+        self._runner.run_async(job_str)
+
+        try:
+            self._wait_for_process(timeout=timeout)
+            self._wait_for_interface(timeout=timeout)
+        except:
+            self.stop()
+            raise
+
+    def stop(self) -> None:
+        """Kills the daemon if it is running."""
+        if self.is_alive():
+            self._shell.kill(self._identifier)
+
+    def channel_switch(self, channel_num: int) -> None:
+        """Switches to the given channel.
+
+        Args:
+            channel_num: The channel to switch to.
+
+        Raises: See _run_hostapd_cli_cmd
+        """
+        try:
+            channel_freq = hostapd_constants.FREQUENCY_MAP[channel_num]
+        except KeyError:
+            raise ValueError(f"Invalid channel number {channel_num}")
+        csa_beacon_count = 10
+        channel_switch_cmd = f"chan_switch {csa_beacon_count} {channel_freq}"
+        self._run_hostapd_cli_cmd(channel_switch_cmd)
+
+    def get_current_channel(self) -> int:
+        """Returns the current channel number.
+
+        Raises: See _run_hostapd_cli_cmd
+        """
+        status_cmd = "status"
+        result = self._run_hostapd_cli_cmd(status_cmd)
+        match = re.search(r"^channel=(\d+)$", result, re.MULTILINE)
+        if not match:
+            raise Error("Current channel could not be determined")
+        try:
+            channel = int(match.group(1))
+        except ValueError:
+            raise Error("Internal error: current channel could not be parsed")
+        return channel
+
+    def get_stas(self) -> set[str]:
+        """Return MAC addresses of all associated STAs."""
+        list_sta_result = self._run_hostapd_cli_cmd("list_sta")
+        stas = set()
+        for line in list_sta_result.splitlines():
+            # Each line must be a valid MAC address. Capture it.
+            m = re.match(r"((?:[0-9A-Fa-f]{2}:){5}[0-9A-Fa-f]{2})", line)
+            if m:
+                stas.add(m.group(1))
+        return stas
+
+    def _sta(self, sta_mac: str) -> str:
+        """Return hostapd's detailed info about an associated STA.
+
+        Returns:
+            Results of the command.
+
+        Raises: See _run_hostapd_cli_cmd
+        """
+        return self._run_hostapd_cli_cmd(f"sta {sta_mac}")
+
+    def get_sta_extended_capabilities(self, sta_mac: str) -> ExtendedCapabilities:
+        """Get extended capabilities for the given STA, as seen by the AP.
+
+        Args:
+            sta_mac: MAC address of the STA in question.
+        Returns:
+            Extended capabilities of the given STA.
+        Raises:
+            Error if extended capabilities for the STA cannot be obtained.
+        """
+        sta_result = self._sta(sta_mac)
+        # hostapd ext_capab field is a hex encoded string representation of the
+        # 802.11 extended capabilities structure, each byte represented by two
+        # chars (each byte having format %02x).
+        m = re.search(r"ext_capab=([0-9A-Faf]+)", sta_result, re.MULTILINE)
+        if not m:
+            raise Error("Failed to get ext_capab from STA details")
+        raw_ext_capab = m.group(1)
+        try:
+            return ExtendedCapabilities(bytearray.fromhex(raw_ext_capab))
+        except ValueError:
+            raise Error(f"ext_capab contains invalid hex string repr {raw_ext_capab}")
+
+    def sta_authenticated(self, sta_mac: str) -> bool:
+        """Is the given STA authenticated?
+
+        Args:
+            sta_mac: MAC address of the STA in question.
+        Returns:
+            True if AP sees that the STA is authenticated, False otherwise.
+        Raises:
+            Error if authenticated status for the STA cannot be obtained.
+        """
+        sta_result = self._sta(sta_mac)
+        m = re.search(r"flags=.*\[AUTH\]", sta_result, re.MULTILINE)
+        return bool(m)
+
+    def sta_associated(self, sta_mac: str) -> bool:
+        """Is the given STA associated?
+
+        Args:
+            sta_mac: MAC address of the STA in question.
+        Returns:
+            True if AP sees that the STA is associated, False otherwise.
+        Raises:
+            Error if associated status for the STA cannot be obtained.
+        """
+        sta_result = self._sta(sta_mac)
+        m = re.search(r"flags=.*\[ASSOC\]", sta_result, re.MULTILINE)
+        return bool(m)
+
+    def sta_authorized(self, sta_mac: str) -> bool:
+        """Is the given STA authorized (802.1X controlled port open)?
+
+        Args:
+            sta_mac: MAC address of the STA in question.
+        Returns:
+            True if AP sees that the STA is 802.1X authorized, False otherwise.
+        Raises:
+            Error if authorized status for the STA cannot be obtained.
+        """
+        sta_result = self._sta(sta_mac)
+        m = re.search(r"flags=.*\[AUTHORIZED\]", sta_result, re.MULTILINE)
+        return bool(m)
+
+    def _bss_tm_req(
+        self, client_mac: str, request: BssTransitionManagementRequest
+    ) -> None:
+        """Send a hostapd BSS Transition Management request command to a STA.
+
+        Args:
+            client_mac: MAC address that will receive the request.
+            request: BSS Transition Management request that will be sent.
+        Raises: See _run_hostapd_cli_cmd
+        """
+        bss_tm_req_cmd = f"bss_tm_req {client_mac}"
+
+        if request.abridged:
+            bss_tm_req_cmd += " abridged=1"
+        if request.bss_termination_included and request.bss_termination_duration:
+            bss_tm_req_cmd += f" bss_term={request.bss_termination_duration.duration}"
+        if request.disassociation_imminent:
+            bss_tm_req_cmd += " disassoc_imminent=1"
+        if request.disassociation_timer is not None:
+            bss_tm_req_cmd += f" disassoc_timer={request.disassociation_timer}"
+        if request.preferred_candidate_list_included:
+            bss_tm_req_cmd += " pref=1"
+        if request.session_information_url:
+            bss_tm_req_cmd += f" url={request.session_information_url}"
+        if request.validity_interval:
+            bss_tm_req_cmd += f" valid_int={request.validity_interval}"
+
+        # neighbor= can appear multiple times, so it requires special handling.
+        if request.candidate_list is not None:
+            for neighbor in request.candidate_list:
+                bssid = neighbor.bssid
+                bssid_info = hex(neighbor.bssid_information)
+                op_class = neighbor.operating_class
+                chan_num = neighbor.channel_number
+                phy_type = int(neighbor.phy_type)
+                bss_tm_req_cmd += (
+                    f" neighbor={bssid},{bssid_info},{op_class},{chan_num},{phy_type}"
+                )
+
+        self._run_hostapd_cli_cmd(bss_tm_req_cmd)
+
+    def send_bss_transition_management_req(
+        self, sta_mac: str, request: BssTransitionManagementRequest
+    ) -> None:
+        """Send a BSS Transition Management request to an associated STA.
+
+        Args:
+            sta_mac: MAC address of the STA in question.
+            request: BSS Transition Management request that will be sent.
+        Raises: See _run_hostapd_cli_cmd
+        """
+        self._bss_tm_req(sta_mac, request)
+
+    def is_alive(self) -> bool:
+        """
+        Returns:
+            True if the daemon is running.
+        """
+        return self._shell.is_alive(self._identifier)
+
+    def pull_logs(self) -> str:
+        """Pulls the log files from where hostapd is running.
+
+        Returns:
+            A string of the hostapd logs.
+        """
+        # TODO: Auto pulling of logs when stop is called.
+        return self._shell.read_file(self._log_file)
+
+    def _run_hostapd_cli_cmd(self, cmd: str) -> str:
+        """Run the given hostapd_cli command.
+
+        Runs the command, waits for the output (up to default timeout), and
+            returns the result.
+
+        Returns:
+            The standard output of the hostapd_cli command.
+
+        Raises:
+            subprocess.TimeoutExpired: When the remote command took too
+                long to execute.
+            antlion.controllers.utils_lib.ssh.connection.Error: When the ssh
+                connection failed to be created.
+            subprocess.CalledProcessError: Ssh worked, but the command had an
+                error executing.
+        """
+        hostapd_cli_job = (
+            f"cd {self._working_dir}; " f"{CLI_PROGRAM_FILE} -p {self._ctrl_file} {cmd}"
+        )
+        proc = self._runner.run(hostapd_cli_job)
+        if proc.returncode:
+            raise CalledProcessError(
+                proc.returncode, hostapd_cli_job, proc.stdout, proc.stderr
+            )
+        return proc.stdout
+
+    def _wait_for_process(self, timeout: int = 60) -> None:
+        """Waits for the process to come up.
+
+        Waits until the hostapd process is found running, or there is
+        a timeout. If the program never comes up then the log file
+        will be scanned for errors.
+
+        Raises: See _scan_for_errors
+        """
+        start_time = time.time()
+        while time.time() - start_time < timeout and not self.is_alive():
+            self._scan_for_errors(False)
+            time.sleep(0.1)
+
+    def _wait_for_interface(self, timeout: int = 60) -> None:
+        """Waits for hostapd to report that the interface is up.
+
+        Waits until hostapd says the interface has been brought up or an
+        error occurs.
+
+        Raises: see _scan_for_errors
+        """
+        start_time = time.time()
+        while time.time() - start_time < timeout:
+            time.sleep(0.1)
+            success = self._shell.search_file("Setup of interface done", self._log_file)
+            if success:
+                return
+            self._scan_for_errors(False)
+
+        self._scan_for_errors(True)
+
+    def _scan_for_errors(self, should_be_up: bool) -> None:
+        """Scans the hostapd log for any errors.
+
+        Args:
+            should_be_up: If true then hostapd program is expected to be alive.
+                          If it is found not alive while this is true an error
+                          is thrown.
+
+        Raises:
+            Error: Raised when a hostapd error is found.
+        """
+        # Store this so that all other errors have priority.
+        is_dead = not self.is_alive()
+
+        bad_config = self._shell.search_file(
+            "Interface initialization failed", self._log_file
+        )
+        if bad_config:
+            raise Error("Interface failed to start", self)
+
+        bad_config = self._shell.search_file(
+            f"Interface {self._interface} wasn't started", self._log_file
+        )
+        if bad_config:
+            raise Error("Interface failed to start", self)
+
+        if should_be_up and is_dead:
+            raise Error("Hostapd failed to start", self)
+
+    def _write_configs(self, additional_parameters: dict[str, Any]) -> None:
+        """Writes the configs to the hostapd config file."""
+        self._shell.delete_file(self._config_file)
+
+        interface_configs = collections.OrderedDict()
+        interface_configs["interface"] = self._interface
+        interface_configs["ctrl_interface"] = self._ctrl_file
+        pairs: Iterable[str] = (f"{k}={v}" for k, v in interface_configs.items())
+
+        packaged_configs = self.config.package_configs() if self.config else []
+        if additional_parameters:
+            packaged_configs.append(additional_parameters)
+        for packaged_config in packaged_configs:
+            config_pairs = (
+                f"{k}={v}" for k, v in packaged_config.items() if v is not None
+            )
+            pairs = itertools.chain(pairs, config_pairs)
+
+        hostapd_conf = "\n".join(pairs)
+
+        logging.info(f"Writing {self._config_file}")
+        logging.debug("******************Start*******************")
+        logging.debug(f"\n{hostapd_conf}")
+        logging.debug("*******************End********************")
+
+        self._shell.write_file(self._config_file, hostapd_conf)
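+
+
+# Illustrative usage, not part of the antlion API (a minimal sketch; `runner`
+# is assumed to be a Runner such as an ssh connection, and `config` an
+# already-built HostapdConfig for wlan0):
+#
+#     ap_daemon = Hostapd(runner, interface="wlan0")
+#     ap_daemon.start(config)
+#     print(ap_daemon.get_current_channel())
+#     print(ap_daemon.get_stas())
+#     ap_daemon.stop()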
diff --git a/packages/antlion/controllers/ap_lib/hostapd_ap_preset.py b/packages/antlion/controllers/ap_lib/hostapd_ap_preset.py
new file mode 100644
index 0000000..c15792a
--- /dev/null
+++ b/packages/antlion/controllers/ap_lib/hostapd_ap_preset.py
@@ -0,0 +1,543 @@
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import annotations
+
+from typing import Any, FrozenSet, TypeVar
+
+from antlion.controllers.ap_lib import hostapd_config, hostapd_constants, hostapd_utils
+from antlion.controllers.ap_lib.hostapd_security import Security
+from antlion.controllers.ap_lib.third_party_ap_profiles import (
+    actiontec,
+    asus,
+    belkin,
+    linksys,
+    netgear,
+    securifi,
+    tplink,
+)
+
+T = TypeVar("T")
+
+
+def _get_or_default(var: T | None, default_value: T) -> T:
+    """Check variable and return non-null value.
+
+    Args:
+         var: Any variable.
+         default_value: Value to return if the var is None.
+
+    Returns:
+         Variable value if not None, default value otherwise.
+    """
+    return var if var is not None else default_value
+
+
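+# Illustrative call of create_ap_preset (below), not taken from the antlion
+# sources; interface names, channel, and SSID are assumptions for the example:
+#
+#     create_ap_preset(
+#         iface_wlan_2g="wlan0",
+#         iface_wlan_5g="wlan1",
+#         profile_name="whirlwind",
+#         channel=36,
+#         ssid="test_network",
+#     )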
+def create_ap_preset(
+    iface_wlan_2g: str,
+    iface_wlan_5g: str,
+    profile_name: str = "whirlwind",
+    channel: int | None = None,
+    mode: str | None = None,
+    frequency: int | None = None,
+    security: Security | None = None,
+    pmf_support: int | None = None,
+    ssid: str | None = None,
+    hidden: bool | None = None,
+    dtim_period: int | None = None,
+    frag_threshold: int | None = None,
+    rts_threshold: int | None = None,
+    force_wmm: bool | None = None,
+    beacon_interval: int | None = None,
+    short_preamble: bool | None = None,
+    n_capabilities: list[Any] | None = None,
+    ac_capabilities: list[Any] | None = None,
+    vht_bandwidth: int | None = None,
+    wnm_features: FrozenSet[hostapd_constants.WnmFeature] = frozenset(),
+    bss_settings: list[Any] = [],
+) -> hostapd_config.HostapdConfig:
+    """AP preset config generator.  This a wrapper for hostapd_config but
+       but supplies the default settings for the preset that is selected.
+
+        You may specify channel or frequency, but not both.  Both options
+        are checked for validity (i.e. you can't specify an invalid channel
+        or a frequency that will not be accepted).
+
+    Args:
+        iface_wlan_2g: The wlan 2g interface name of the AP.
+        iface_wlan_5g: The wlan 5g interface name of the AP.
+        profile_name: The name of the device to get the preset for.
+                      Options: whirlwind
+        channel: Channel number.
+        mode: The hostapd 802.11 mode of operation.
+        frequency: Frequency of channel.
+        security: The security settings to use.
+        pmf_support: Whether pmf is disabled, enabled, or required.
+        ssid: The name of the ssid to broadcast.
+        hidden: Whether to include the ssid in the beacons.
+        dtim_period: The DTIM period for the BSS, default is 2.
+        frag_threshold: Max size of packet before fragmenting the packet.
+        rts_threshold: Max size of packet before requiring protection for
+            rts/cts or cts to self.
+        force_wmm: Whether to force WMM on or off.
+        beacon_interval: The beacon interval of the BSS.
+        short_preamble: Whether to use a short preamble.
+        n_capabilities: 802.11n capabilities for the BSS to advertise.
+        ac_capabilities: 802.11ac capabilities for the BSS to advertise.
+        vht_bandwidth: VHT bandwidth for 11ac operation.
+        wnm_features: WNM features to enable on the AP.
+        bss_settings: The settings for all bss.
+
+    Returns: A hostapd_config object that can be used by the hostapd object.
+    """
+    if security is None:
+        security = Security()
+
+    # Verify interfaces
+    hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST)
+    hostapd_utils.verify_interface(iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST)
+
+    if channel is not None:
+        frequency = hostapd_config.get_frequency_for_channel(channel)
+    elif frequency is not None:
+        channel = hostapd_config.get_channel_for_frequency(frequency)
+
+    if channel is None or frequency is None:
+        raise ValueError("Must specify channel or frequency")
+
+    if profile_name == "whirlwind":
+        # Profile indicates phy mode is 11bgn for 2.4GHz or 11acn for 5GHz.
+        hidden = _get_or_default(hidden, False)
+        force_wmm = _get_or_default(force_wmm, True)
+        beacon_interval = _get_or_default(beacon_interval, 100)
+        short_preamble = _get_or_default(short_preamble, True)
+        dtim_period = _get_or_default(dtim_period, 2)
+        frag_threshold = _get_or_default(frag_threshold, 2346)
+        rts_threshold = _get_or_default(rts_threshold, 2347)
+        if frequency < 5000:
+            interface = iface_wlan_2g
+            mode = _get_or_default(mode, hostapd_constants.MODE_11N_MIXED)
+            n_capabilities = _get_or_default(
+                n_capabilities,
+                [
+                    hostapd_constants.N_CAPABILITY_LDPC,
+                    hostapd_constants.N_CAPABILITY_SGI20,
+                    hostapd_constants.N_CAPABILITY_SGI40,
+                    hostapd_constants.N_CAPABILITY_TX_STBC,
+                    hostapd_constants.N_CAPABILITY_RX_STBC1,
+                    hostapd_constants.N_CAPABILITY_DSSS_CCK_40,
+                ],
+            )
+            config = hostapd_config.HostapdConfig(
+                ssid=ssid,
+                hidden=hidden,
+                security=security,
+                pmf_support=pmf_support,
+                interface=interface,
+                mode=mode,
+                force_wmm=force_wmm,
+                beacon_interval=beacon_interval,
+                dtim_period=dtim_period,
+                short_preamble=short_preamble,
+                frequency=frequency,
+                n_capabilities=n_capabilities,
+                frag_threshold=frag_threshold,
+                rts_threshold=rts_threshold,
+                wnm_features=wnm_features,
+                bss_settings=bss_settings,
+            )
+        else:
+            interface = iface_wlan_5g
+            vht_bandwidth = _get_or_default(vht_bandwidth, 80)
+            mode = _get_or_default(mode, hostapd_constants.MODE_11AC_MIXED)
+            if hostapd_config.ht40_plus_allowed(channel):
+                extended_channel = hostapd_constants.N_CAPABILITY_HT40_PLUS
+            elif hostapd_config.ht40_minus_allowed(channel):
+                extended_channel = hostapd_constants.N_CAPABILITY_HT40_MINUS
+            # Channel 165 operates in 20MHz with n or ac modes.
+            if channel == 165:
+                mode = hostapd_constants.MODE_11N_MIXED
+                extended_channel = hostapd_constants.N_CAPABILITY_HT20
+            # Define the n capability vector for 20 MHz and higher bandwidth
+            if not vht_bandwidth:
+                n_capabilities = _get_or_default(n_capabilities, [])
+            elif vht_bandwidth >= 40:
+                n_capabilities = _get_or_default(
+                    n_capabilities,
+                    [
+                        hostapd_constants.N_CAPABILITY_LDPC,
+                        extended_channel,
+                        hostapd_constants.N_CAPABILITY_SGI20,
+                        hostapd_constants.N_CAPABILITY_SGI40,
+                        hostapd_constants.N_CAPABILITY_TX_STBC,
+                        hostapd_constants.N_CAPABILITY_RX_STBC1,
+                    ],
+                )
+            else:
+                n_capabilities = _get_or_default(
+                    n_capabilities,
+                    [
+                        hostapd_constants.N_CAPABILITY_LDPC,
+                        hostapd_constants.N_CAPABILITY_SGI20,
+                        hostapd_constants.N_CAPABILITY_SGI40,
+                        hostapd_constants.N_CAPABILITY_TX_STBC,
+                        hostapd_constants.N_CAPABILITY_RX_STBC1,
+                        hostapd_constants.N_CAPABILITY_HT20,
+                    ],
+                )
+            ac_capabilities = _get_or_default(
+                ac_capabilities,
+                [
+                    hostapd_constants.AC_CAPABILITY_MAX_MPDU_11454,
+                    hostapd_constants.AC_CAPABILITY_RXLDPC,
+                    hostapd_constants.AC_CAPABILITY_SHORT_GI_80,
+                    hostapd_constants.AC_CAPABILITY_TX_STBC_2BY1,
+                    hostapd_constants.AC_CAPABILITY_RX_STBC_1,
+                    hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7,
+                    hostapd_constants.AC_CAPABILITY_RX_ANTENNA_PATTERN,
+                    hostapd_constants.AC_CAPABILITY_TX_ANTENNA_PATTERN,
+                ],
+            )
+            config = hostapd_config.HostapdConfig(
+                ssid=ssid,
+                hidden=hidden,
+                security=security,
+                pmf_support=pmf_support,
+                interface=interface,
+                mode=mode,
+                force_wmm=force_wmm,
+                vht_channel_width=vht_bandwidth,
+                beacon_interval=beacon_interval,
+                dtim_period=dtim_period,
+                short_preamble=short_preamble,
+                frequency=frequency,
+                frag_threshold=frag_threshold,
+                rts_threshold=rts_threshold,
+                n_capabilities=n_capabilities,
+                ac_capabilities=ac_capabilities,
+                bss_settings=bss_settings,
+            )
+    elif profile_name == "whirlwind_11ab_legacy":
+        if frequency < 5000:
+            mode = hostapd_constants.MODE_11B
+        else:
+            mode = hostapd_constants.MODE_11A
+
+        config = create_ap_preset(
+            iface_wlan_2g=iface_wlan_2g,
+            iface_wlan_5g=iface_wlan_5g,
+            ssid=ssid,
+            channel=channel,
+            mode=mode,
+            security=security,
+            pmf_support=pmf_support,
+            hidden=hidden,
+            force_wmm=force_wmm,
+            beacon_interval=beacon_interval,
+            short_preamble=short_preamble,
+            dtim_period=dtim_period,
+            rts_threshold=rts_threshold,
+            frag_threshold=frag_threshold,
+            n_capabilities=[],
+            ac_capabilities=[],
+            vht_bandwidth=None,
+            wnm_features=wnm_features,
+        )
+    elif profile_name == "whirlwind_11ag_legacy":
+        if frequency < 5000:
+            mode = hostapd_constants.MODE_11G
+        else:
+            mode = hostapd_constants.MODE_11A
+
+        config = create_ap_preset(
+            iface_wlan_2g=iface_wlan_2g,
+            iface_wlan_5g=iface_wlan_5g,
+            ssid=ssid,
+            channel=channel,
+            mode=mode,
+            security=security,
+            pmf_support=pmf_support,
+            hidden=hidden,
+            force_wmm=force_wmm,
+            beacon_interval=beacon_interval,
+            short_preamble=short_preamble,
+            dtim_period=dtim_period,
+            rts_threshold=rts_threshold,
+            frag_threshold=frag_threshold,
+            n_capabilities=[],
+            ac_capabilities=[],
+            vht_bandwidth=None,
+            wnm_features=wnm_features,
+        )
+    elif profile_name == "mistral":
+        hidden = _get_or_default(hidden, False)
+        force_wmm = _get_or_default(force_wmm, True)
+        beacon_interval = _get_or_default(beacon_interval, 100)
+        short_preamble = _get_or_default(short_preamble, True)
+        dtim_period = _get_or_default(dtim_period, 2)
+        frag_threshold = None
+        rts_threshold = None
+
+        # Google IE
+        # Country Code IE ('us' lowercase)
+        vendor_elements = {
+            "vendor_elements": "dd0cf4f5e80505ff0000ffffffff" "070a75732024041e95051e00"
+        }
+        default_configs = {"bridge": "br-lan", "iapp_interface": "br-lan"}
+        additional_params = (
+            vendor_elements
+            | default_configs
+            | hostapd_constants.ENABLE_RRM_BEACON_REPORT
+            | hostapd_constants.ENABLE_RRM_NEIGHBOR_REPORT
+        )
+
+        if frequency < 5000:
+            interface = iface_wlan_2g
+            mode = _get_or_default(mode, hostapd_constants.MODE_11N_MIXED)
+            n_capabilities = _get_or_default(
+                n_capabilities,
+                [
+                    hostapd_constants.N_CAPABILITY_LDPC,
+                    hostapd_constants.N_CAPABILITY_SGI20,
+                    hostapd_constants.N_CAPABILITY_SGI40,
+                    hostapd_constants.N_CAPABILITY_TX_STBC,
+                    hostapd_constants.N_CAPABILITY_RX_STBC1,
+                    hostapd_constants.N_CAPABILITY_DSSS_CCK_40,
+                ],
+            )
+
+            config = hostapd_config.HostapdConfig(
+                ssid=ssid,
+                hidden=hidden,
+                security=security,
+                pmf_support=pmf_support,
+                interface=interface,
+                mode=mode,
+                force_wmm=force_wmm,
+                beacon_interval=beacon_interval,
+                dtim_period=dtim_period,
+                short_preamble=short_preamble,
+                frequency=frequency,
+                n_capabilities=n_capabilities,
+                frag_threshold=frag_threshold,
+                rts_threshold=rts_threshold,
+                wnm_features=wnm_features,
+                bss_settings=bss_settings,
+                additional_parameters=additional_params,
+                set_ap_defaults_profile=profile_name,
+            )
+        else:
+            interface = iface_wlan_5g
+            vht_bandwidth = _get_or_default(vht_bandwidth, 80)
+            mode = _get_or_default(mode, hostapd_constants.MODE_11AC_MIXED)
+            if hostapd_config.ht40_plus_allowed(channel):
+                extended_channel = hostapd_constants.N_CAPABILITY_HT40_PLUS
+            elif hostapd_config.ht40_minus_allowed(channel):
+                extended_channel = hostapd_constants.N_CAPABILITY_HT40_MINUS
+            # Channel 165 only supports 20 MHz operation; fall back to 11n/HT20.
+            if channel == 165:
+                mode = hostapd_constants.MODE_11N_MIXED
+                extended_channel = hostapd_constants.N_CAPABILITY_HT20
+            if vht_bandwidth >= 40:
+                n_capabilities = _get_or_default(
+                    n_capabilities,
+                    [
+                        hostapd_constants.N_CAPABILITY_LDPC,
+                        extended_channel,
+                        hostapd_constants.N_CAPABILITY_SGI20,
+                        hostapd_constants.N_CAPABILITY_SGI40,
+                        hostapd_constants.N_CAPABILITY_TX_STBC,
+                        hostapd_constants.N_CAPABILITY_RX_STBC1,
+                    ],
+                )
+            else:
+                n_capabilities = _get_or_default(
+                    n_capabilities,
+                    [
+                        hostapd_constants.N_CAPABILITY_LDPC,
+                        hostapd_constants.N_CAPABILITY_SGI20,
+                        hostapd_constants.N_CAPABILITY_SGI40,
+                        hostapd_constants.N_CAPABILITY_TX_STBC,
+                        hostapd_constants.N_CAPABILITY_RX_STBC1,
+                        hostapd_constants.N_CAPABILITY_HT20,
+                    ],
+                )
+            ac_capabilities = _get_or_default(
+                ac_capabilities,
+                [
+                    hostapd_constants.AC_CAPABILITY_MAX_MPDU_11454,
+                    hostapd_constants.AC_CAPABILITY_RXLDPC,
+                    hostapd_constants.AC_CAPABILITY_SHORT_GI_80,
+                    hostapd_constants.AC_CAPABILITY_TX_STBC_2BY1,
+                    hostapd_constants.AC_CAPABILITY_RX_STBC_1,
+                    hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7,
+                    hostapd_constants.AC_CAPABILITY_RX_ANTENNA_PATTERN,
+                    hostapd_constants.AC_CAPABILITY_TX_ANTENNA_PATTERN,
+                    hostapd_constants.AC_CAPABILITY_SU_BEAMFORMER,
+                    hostapd_constants.AC_CAPABILITY_SU_BEAMFORMEE,
+                    hostapd_constants.AC_CAPABILITY_MU_BEAMFORMER,
+                    hostapd_constants.AC_CAPABILITY_SOUNDING_DIMENSION_4,
+                    hostapd_constants.AC_CAPABILITY_BF_ANTENNA_4,
+                ],
+            )
+
+            config = hostapd_config.HostapdConfig(
+                ssid=ssid,
+                hidden=hidden,
+                security=security,
+                pmf_support=pmf_support,
+                interface=interface,
+                mode=mode,
+                force_wmm=force_wmm,
+                vht_channel_width=vht_bandwidth,
+                beacon_interval=beacon_interval,
+                dtim_period=dtim_period,
+                short_preamble=short_preamble,
+                frequency=frequency,
+                frag_threshold=frag_threshold,
+                rts_threshold=rts_threshold,
+                n_capabilities=n_capabilities,
+                ac_capabilities=ac_capabilities,
+                wnm_features=wnm_features,
+                bss_settings=bss_settings,
+                additional_parameters=additional_params,
+                set_ap_defaults_profile=profile_name,
+            )
+    elif profile_name == "actiontec_pk5000":
+        config = actiontec.actiontec_pk5000(
+            iface_wlan_2g=iface_wlan_2g, channel=channel, ssid=ssid, security=security
+        )
+    elif profile_name == "actiontec_mi424wr":
+        config = actiontec.actiontec_mi424wr(
+            iface_wlan_2g=iface_wlan_2g, channel=channel, ssid=ssid, security=security
+        )
+    elif profile_name == "asus_rtac66u":
+        config = asus.asus_rtac66u(
+            iface_wlan_2g=iface_wlan_2g,
+            iface_wlan_5g=iface_wlan_5g,
+            channel=channel,
+            ssid=ssid,
+            security=security,
+        )
+    elif profile_name == "asus_rtac86u":
+        config = asus.asus_rtac86u(
+            iface_wlan_2g=iface_wlan_2g,
+            iface_wlan_5g=iface_wlan_5g,
+            channel=channel,
+            ssid=ssid,
+            security=security,
+        )
+    elif profile_name == "asus_rtac5300":
+        config = asus.asus_rtac5300(
+            iface_wlan_2g=iface_wlan_2g,
+            iface_wlan_5g=iface_wlan_5g,
+            channel=channel,
+            ssid=ssid,
+            security=security,
+        )
+    elif profile_name == "asus_rtn56u":
+        config = asus.asus_rtn56u(
+            iface_wlan_2g=iface_wlan_2g,
+            iface_wlan_5g=iface_wlan_5g,
+            channel=channel,
+            ssid=ssid,
+            security=security,
+        )
+    elif profile_name == "asus_rtn66u":
+        config = asus.asus_rtn66u(
+            iface_wlan_2g=iface_wlan_2g,
+            iface_wlan_5g=iface_wlan_5g,
+            channel=channel,
+            ssid=ssid,
+            security=security,
+        )
+    elif profile_name == "belkin_f9k1001v5":
+        config = belkin.belkin_f9k1001v5(
+            iface_wlan_2g=iface_wlan_2g, channel=channel, ssid=ssid, security=security
+        )
+    elif profile_name == "linksys_ea4500":
+        config = linksys.linksys_ea4500(
+            iface_wlan_2g=iface_wlan_2g,
+            iface_wlan_5g=iface_wlan_5g,
+            channel=channel,
+            ssid=ssid,
+            security=security,
+        )
+    elif profile_name == "linksys_ea9500":
+        config = linksys.linksys_ea9500(
+            iface_wlan_2g=iface_wlan_2g,
+            iface_wlan_5g=iface_wlan_5g,
+            channel=channel,
+            ssid=ssid,
+            security=security,
+        )
+    elif profile_name == "linksys_wrt1900acv2":
+        config = linksys.linksys_wrt1900acv2(
+            iface_wlan_2g=iface_wlan_2g,
+            iface_wlan_5g=iface_wlan_5g,
+            channel=channel,
+            ssid=ssid,
+            security=security,
+        )
+    elif profile_name == "netgear_r7000":
+        config = netgear.netgear_r7000(
+            iface_wlan_2g=iface_wlan_2g,
+            iface_wlan_5g=iface_wlan_5g,
+            channel=channel,
+            ssid=ssid,
+            security=security,
+        )
+    elif profile_name == "netgear_wndr3400":
+        config = netgear.netgear_wndr3400(
+            iface_wlan_2g=iface_wlan_2g,
+            iface_wlan_5g=iface_wlan_5g,
+            channel=channel,
+            ssid=ssid,
+            security=security,
+        )
+    elif profile_name == "securifi_almond":
+        config = securifi.securifi_almond(
+            iface_wlan_2g=iface_wlan_2g, channel=channel, ssid=ssid, security=security
+        )
+    elif profile_name == "tplink_archerc5":
+        config = tplink.tplink_archerc5(
+            iface_wlan_2g=iface_wlan_2g,
+            iface_wlan_5g=iface_wlan_5g,
+            channel=channel,
+            ssid=ssid,
+            security=security,
+        )
+    elif profile_name == "tplink_archerc7":
+        config = tplink.tplink_archerc7(
+            iface_wlan_2g=iface_wlan_2g,
+            iface_wlan_5g=iface_wlan_5g,
+            channel=channel,
+            ssid=ssid,
+            security=security,
+        )
+    elif profile_name == "tplink_c1200":
+        config = tplink.tplink_c1200(
+            iface_wlan_2g=iface_wlan_2g,
+            iface_wlan_5g=iface_wlan_5g,
+            channel=channel,
+            ssid=ssid,
+            security=security,
+        )
+    elif profile_name == "tplink_tlwr940n":
+        config = tplink.tplink_tlwr940n(
+            iface_wlan_2g=iface_wlan_2g, channel=channel, ssid=ssid, security=security
+        )
+    else:
+        raise ValueError(f"Invalid ap model specified ({profile_name})")
+
+    return config
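+
+
+# A minimal usage sketch (illustrative only; the interface and SSID values are
+# hypothetical): request the legacy 11a/g preset on 2.4 GHz channel 6. Only
+# keyword arguments that appear in the dispatch above are shown; all remaining
+# parameters keep their defaults.
+#
+#     config = create_ap_preset(
+#         profile_name="whirlwind_11ag_legacy",
+#         iface_wlan_2g="wlan0",
+#         iface_wlan_5g="wlan1",
+#         ssid="example-ssid",
+#         channel=6,
+#         security=Security(),
+#     )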
diff --git a/packages/antlion/controllers/ap_lib/hostapd_bss_settings.py b/packages/antlion/controllers/ap_lib/hostapd_bss_settings.py
new file mode 100644
index 0000000..2f4d261
--- /dev/null
+++ b/packages/antlion/controllers/ap_lib/hostapd_bss_settings.py
@@ -0,0 +1,61 @@
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import collections
+
+from antlion.controllers.ap_lib.hostapd_security import Security
+
+
+class BssSettings(object):
+    """Settings for a bss.
+
+    Settings for a bss, allowing multiple networks on a single device.
+
+    Attributes:
+        name: The name that this bss will go by.
+        ssid: The name of the ssid to broadcast.
+        hidden: If true then the ssid will be hidden.
+        security: The security settings to use.
+        bssid: The bssid to use.
+    """
+
+    def __init__(
+        self,
+        name: str,
+        ssid: str,
+        security: Security,
+        hidden: bool = False,
+        bssid: str | None = None,
+    ):
+        self.name = name
+        self.ssid = ssid
+        self.security = security
+        self.hidden = hidden
+        self.bssid = bssid
+
+    def generate_dict(self) -> dict[str, str | int]:
+        """Returns: A dictionary of bss settings."""
+        settings: dict[str, str | int] = collections.OrderedDict()
+        settings["bss"] = self.name
+        if self.bssid:
+            settings["bssid"] = self.bssid
+        if self.ssid:
+            settings["ssid"] = self.ssid
+            settings["ignore_broadcast_ssid"] = 1 if self.hidden else 0
+
+        security_settings = self.security.generate_dict()
+        for k, v in security_settings.items():
+            settings[k] = v
+
+        return settings
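+
+
+# A minimal usage sketch (illustrative only; the names below are hypothetical):
+# declare a second, hidden BSS and render it to hostapd key/value pairs.
+#
+#     guest = BssSettings(
+#         name="wlan0_guest", ssid="guest-net", security=Security(), hidden=True
+#     )
+#     fields = guest.generate_dict()
+#     # fields["bss"] == "wlan0_guest", fields["ignore_broadcast_ssid"] == 1,
+#     # plus whatever keys Security().generate_dict() contributes.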
diff --git a/packages/antlion/controllers/ap_lib/hostapd_config.py b/packages/antlion/controllers/ap_lib/hostapd_config.py
new file mode 100644
index 0000000..4cce604
--- /dev/null
+++ b/packages/antlion/controllers/ap_lib/hostapd_config.py
@@ -0,0 +1,710 @@
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import collections
+import logging
+from typing import Any, FrozenSet
+
+from antlion.controllers.ap_lib import hostapd_constants
+from antlion.controllers.ap_lib.hostapd_bss_settings import BssSettings
+from antlion.controllers.ap_lib.hostapd_security import Security, SecurityMode
+
+
+def ht40_plus_allowed(channel: int):
+    """Returns: True iff HT40+ is enabled for this configuration."""
+    channel_supported = (
+        channel
+        in hostapd_constants.HT40_ALLOW_MAP[
+            hostapd_constants.N_CAPABILITY_HT40_PLUS_CHANNELS
+        ]
+    )
+    return channel_supported
+
+
+def ht40_minus_allowed(channel: int):
+    """Returns: True iff HT40- is enabled for this configuration."""
+    channel_supported = (
+        channel
+        in hostapd_constants.HT40_ALLOW_MAP[
+            hostapd_constants.N_CAPABILITY_HT40_MINUS_CHANNELS
+        ]
+    )
+    return channel_supported
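+
+
+# For reference, HT40_ALLOW_MAP in hostapd_constants makes, for example, channel 1
+# HT40+ only, channel 11 HT40- only, channel 6 eligible for both, and channel 165
+# eligible for neither:
+#
+#     ht40_plus_allowed(1)     # True
+#     ht40_minus_allowed(1)    # False
+#     ht40_plus_allowed(165)   # False
+#     ht40_minus_allowed(165)  # False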
+
+
+def get_frequency_for_channel(channel: int):
+    """The frequency associated with a given channel number.
+
+    Args:
+        channel: channel number.
+
+    Returns:
+        int, frequency in MHz associated with the channel.
+
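+    Example (values from hostapd_constants.CHANNEL_MAP):
+        >>> get_frequency_for_channel(6)
+        2437
+        >>> get_frequency_for_channel(36)
+        5180
+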
+    """
+    for frequency, channel_iter in hostapd_constants.CHANNEL_MAP.items():
+        if channel == channel_iter:
+            return frequency
+    raise ValueError(f"Unknown channel value: {channel!r}.")
+
+
+def get_channel_for_frequency(frequency: int):
+    """The channel number associated with a given frequency.
+
+    Args:
+        frequency: frequency in MHz.
+
+    Returns:
+        int, channel number associated with the frequency.
+
+    """
+    return hostapd_constants.CHANNEL_MAP[frequency]
+
+
+class HostapdConfig(object):
+    """The root settings for the router.
+
+    All the settings for a router that are not part of an ssid.
+    """
+
+    def __init__(
+        self,
+        interface: str | None = None,
+        mode: str | None = None,
+        channel: int | None = None,
+        frequency: int | None = None,
+        n_capabilities: list[Any] | None = None,
+        beacon_interval: int | None = None,
+        dtim_period: int | None = None,
+        frag_threshold: int | None = None,
+        rts_threshold: int | None = None,
+        short_preamble: bool | None = None,
+        ssid: str | None = None,
+        hidden: bool = False,
+        security: Security | None = None,
+        bssid: str | None = None,
+        force_wmm: bool | None = None,
+        pmf_support: int | None = None,
+        obss_interval: int | None = None,
+        vht_channel_width: Any | None = None,
+        vht_center_channel: int | None = None,
+        ac_capabilities: list[Any] | None = None,
+        beacon_footer: str = "",
+        spectrum_mgmt_required: bool | None = None,
+        scenario_name: str | None = None,
+        min_streams: int | None = None,
+        wnm_features: FrozenSet[hostapd_constants.WnmFeature] = frozenset(),
+        bss_settings: list[Any] | None = None,
+        additional_parameters: dict[str, Any] | None = None,
+        set_ap_defaults_profile: str = "whirlwind",
+    ) -> None:
+        """Construct a HostapdConfig.
+
+        You may specify channel or frequency, but not both.  Both options
+        are checked for validity (i.e. you can't specify an invalid channel
+        or a frequency that will not be accepted).
+
+        Args:
+            interface: The name of the interface to use.
+            mode: One of the MODE_11x constants from hostapd_constants.
+            channel: Channel number.
+            frequency: Frequency of channel.
+            n_capabilities: List of N_CAPABILITY_x constants from hostapd_constants.
+            beacon_interval: Beacon interval of AP.
+            dtim_period: Include a DTIM every |dtim_period| beacons.
+            frag_threshold: Maximum outgoing data frame size.
+            rts_threshold: Maximum packet size without requiring explicit
+                protection via rts/cts or cts to self.
+            short_preamble: Whether to use a short preamble.
+            ssid: string, The name of the ssid to broadcast.
+            hidden: Should the ssid be hidden.
+            security: The security settings to use.
+            bssid: A MAC address like string for the BSSID.
+            force_wmm: True if we should force WMM on, False if we should
+                force it off, None if we shouldn't force anything.
+            pmf_support: One of the PMF_SUPPORT_* constants from
+                hostapd_constants. Controls whether the client supports/must
+                support 802.11w. If None, defaults to required with WPA3, else
+                defaults to disabled.
+            obss_interval: Interval in seconds that client should be
+                required to do background scans for overlapping BSSes.
+            vht_channel_width: VHT channel width in MHz (20, 40, 80, 160, or
+                "80+80").
+            vht_center_channel: Center channel of segment 0.
+            ac_capabilities: List of AC_CAPABILITY_x constants from hostapd_constants.
+            beacon_footer: Containing (not validated) IE data to be
+                placed at the end of the beacon.
+            spectrum_mgmt_required: True if we require the DUT to support
+                spectrum management.
+            scenario_name: To be included in file names, instead
+                of the interface name.
+            min_streams: Number of spatial streams required.
+            wnm_features: WNM features to enable on the AP.
+            bss_settings: The settings for all bss.
+            additional_parameters: A dictionary of additional parameters to add
+                to the hostapd config.
+            set_ap_defaults_profile: profile name to load defaults from
+        """
+        if n_capabilities is None:
+            n_capabilities = []
+        if ac_capabilities is None:
+            ac_capabilities = []
+        if bss_settings is None:
+            bss_settings = []
+        if additional_parameters is None:
+            additional_parameters = {}
+        if security is None:
+            security = Security()
+
+        self.set_ap_defaults_profile = set_ap_defaults_profile
+        self._interface = interface
+        if channel is not None and frequency is not None:
+            raise ValueError("Specify either frequency or channel " "but not both.")
+
+        unknown_caps = [
+            cap
+            for cap in n_capabilities
+            if cap not in hostapd_constants.N_CAPABILITIES_MAPPING
+        ]
+        if unknown_caps:
+            raise ValueError(f"Unknown capabilities: {unknown_caps!r}")
+
+        if channel:
+            self.channel = channel
+        elif frequency:
+            self.frequency = frequency
+        else:
+            raise ValueError("Specify either frequency or channel.")
+
+        self._n_capabilities = set(n_capabilities)
+        self._wmm_enabled: bool | None = None
+        if force_wmm is not None:
+            self._wmm_enabled = force_wmm
+        elif self._n_capabilities:
+            self._wmm_enabled = True
+        if self._n_capabilities and mode is None:
+            mode = hostapd_constants.MODE_11N_PURE
+        self._mode = mode
+
+        if not self.supports_frequency(self.frequency):
+            raise ValueError(
+                "Configured a mode %s that does not support "
+                "frequency %d" % (self._mode, self.frequency)
+            )
+
+        self._beacon_interval = beacon_interval
+        self._dtim_period = dtim_period
+        self._frag_threshold = frag_threshold
+        self._rts_threshold = rts_threshold
+        self._short_preamble = short_preamble
+        self._ssid = ssid
+        self._hidden = hidden
+        self._security = security
+        self._bssid = bssid
+        # Default PMF Values
+        if pmf_support is None:
+            if self.security and self.security.security_mode is SecurityMode.WPA3:
+                # Set PMF required for WPA3
+                self._pmf_support = hostapd_constants.PMF_SUPPORT_REQUIRED
+            elif self.security and self.security.security_mode.is_wpa3():
+                # Default PMF to enabled for WPA3 mixed modes (can be
+                # overwritten by explicitly provided value)
+                self._pmf_support = hostapd_constants.PMF_SUPPORT_ENABLED
+            else:
+                # Default PMF to disabled for all other modes (can be
+                # overwritten by explicitly provided value)
+                self._pmf_support = hostapd_constants.PMF_SUPPORT_DISABLED
+        elif pmf_support not in hostapd_constants.PMF_SUPPORT_VALUES:
+            raise ValueError(f"Invalid value for pmf_support: {pmf_support!r}")
+        elif (
+            pmf_support != hostapd_constants.PMF_SUPPORT_REQUIRED
+            and self.security
+            and self.security.security_mode is SecurityMode.WPA3
+        ):
+            raise ValueError("PMF support must be required with wpa3.")
+        else:
+            self._pmf_support = pmf_support
+        self._obss_interval = obss_interval
+        if self.is_11ac:
+            if str(vht_channel_width) == "40" or str(vht_channel_width) == "20":
+                self._vht_oper_chwidth = hostapd_constants.VHT_CHANNEL_WIDTH_40
+            elif str(vht_channel_width) == "80":
+                self._vht_oper_chwidth = hostapd_constants.VHT_CHANNEL_WIDTH_80
+            elif str(vht_channel_width) == "160":
+                self._vht_oper_chwidth = hostapd_constants.VHT_CHANNEL_WIDTH_160
+            elif str(vht_channel_width) == "80+80":
+                self._vht_oper_chwidth = hostapd_constants.VHT_CHANNEL_WIDTH_80_80
+            elif vht_channel_width is not None:
+                raise ValueError("Invalid channel width")
+            else:
+                logging.warning(
+                    "No channel bandwidth specified.  Using 80MHz for 11ac."
+                )
+                self._vht_oper_chwidth = hostapd_constants.VHT_CHANNEL_WIDTH_80
+            if vht_center_channel is not None:
+                self._vht_oper_centr_freq_seg0_idx = vht_center_channel
+            elif vht_channel_width == 20 and channel is not None:
+                self._vht_oper_centr_freq_seg0_idx = channel
+            else:
+                self._vht_oper_centr_freq_seg0_idx = (
+                    self._get_11ac_center_channel_from_channel(self.channel)
+                )
+            self._ac_capabilities = set(ac_capabilities)
+        self._beacon_footer = beacon_footer
+        self._spectrum_mgmt_required = spectrum_mgmt_required
+        self._scenario_name = scenario_name
+        self._min_streams = min_streams
+        self._wnm_features = wnm_features
+        self._additional_parameters = additional_parameters
+
+        self._bss_lookup: dict[str, BssSettings] = collections.OrderedDict()
+        for bss in bss_settings:
+            if bss.name in self._bss_lookup:
+                raise ValueError(
+                    "Cannot have multiple bss settings with the same name."
+                )
+            self._bss_lookup[bss.name] = bss
+
+    def _get_11ac_center_channel_from_channel(self, channel: int) -> int:
+        """Returns the center channel of the selected channel band based
+        on the channel and channel bandwidth provided.
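+
+        For example, with an 80 MHz width (delta of 6 in
+        hostapd_constants.CENTER_CHANNEL_MAP), channel 44 falls in the
+        (36, 48) block, so the center channel index is 36 + 6 = 42.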
+        """
+        channel = int(channel)
+        center_channel_delta = hostapd_constants.CENTER_CHANNEL_MAP[
+            self._vht_oper_chwidth
+        ]["delta"]
+
+        for channel_map in hostapd_constants.CENTER_CHANNEL_MAP[self._vht_oper_chwidth][
+            "channels"
+        ]:
+            lower_channel_bound, upper_channel_bound = channel_map
+            if lower_channel_bound <= channel <= upper_channel_bound:
+                return lower_channel_bound + center_channel_delta
+        raise ValueError(f"Invalid channel for {self._vht_oper_chwidth}.")
+
+    @property
+    def _get_default_config(self):
+        """Returns: dict of default options for hostapd."""
+        if self.set_ap_defaults_profile == "mistral":
+            return collections.OrderedDict(
+                [
+                    ("logger_syslog", "-1"),
+                    ("logger_syslog_level", "0"),
+                    # default RTS and frag threshold to ``off''
+                    ("rts_threshold", None),
+                    ("fragm_threshold", None),
+                    ("driver", hostapd_constants.DRIVER_NAME),
+                ]
+            )
+        else:
+            return collections.OrderedDict(
+                [
+                    ("logger_syslog", "-1"),
+                    ("logger_syslog_level", "0"),
+                    # default RTS and frag threshold to ``off''
+                    ("rts_threshold", "2347"),
+                    ("fragm_threshold", "2346"),
+                    ("driver", hostapd_constants.DRIVER_NAME),
+                ]
+            )
+
+    @property
+    def _hostapd_ht_capabilities(self):
+        """Returns: string suitable for the ht_capab= line in a hostapd config."""
+        ret = []
+        for cap in hostapd_constants.N_CAPABILITIES_MAPPING.keys():
+            if cap in self._n_capabilities:
+                ret.append(hostapd_constants.N_CAPABILITIES_MAPPING[cap])
+        return "".join(ret)
+
+    @property
+    def _hostapd_vht_capabilities(self):
+        """Returns: string suitable for the vht_capab= line in a hostapd config."""
+        ret = []
+        for cap in hostapd_constants.AC_CAPABILITIES_MAPPING.keys():
+            if cap in self._ac_capabilities:
+                ret.append(hostapd_constants.AC_CAPABILITIES_MAPPING[cap])
+        return "".join(ret)
+
+    @property
+    def _require_ht(self):
+        """Returns: True iff clients should be required to support HT."""
+        return self._mode == hostapd_constants.MODE_11N_PURE
+
+    @property
+    def _require_vht(self):
+        """Returns: True if clients should be required to support VHT."""
+        return self._mode == hostapd_constants.MODE_11AC_PURE
+
+    @property
+    def hw_mode(self):
+        """Returns: string hardware mode understood by hostapd."""
+        if self._mode == hostapd_constants.MODE_11A:
+            return hostapd_constants.MODE_11A
+        if self._mode == hostapd_constants.MODE_11B:
+            return hostapd_constants.MODE_11B
+        if self._mode == hostapd_constants.MODE_11G:
+            return hostapd_constants.MODE_11G
+        if self.is_11n or self.is_11ac:
+            # For their own historical reasons, hostapd wants it this way.
+            if self._frequency > 5000:
+                return hostapd_constants.MODE_11A
+            return hostapd_constants.MODE_11G
+        raise ValueError("Invalid mode.")
+
+    @property
+    def is_11n(self):
+        """Returns: True if we're trying to host an 802.11n network."""
+        return self._mode in (
+            hostapd_constants.MODE_11N_MIXED,
+            hostapd_constants.MODE_11N_PURE,
+        )
+
+    @property
+    def is_11ac(self):
+        """Returns: True if we're trying to host an 802.11ac network."""
+        return self._mode in (
+            hostapd_constants.MODE_11AC_MIXED,
+            hostapd_constants.MODE_11AC_PURE,
+        )
+
+    @property
+    def channel(self):
+        """Returns: int channel number for self.frequency."""
+        return get_channel_for_frequency(self.frequency)
+
+    @channel.setter
+    def channel(self, value):
+        """Sets the channel number to configure hostapd to listen on.
+
+        Args:
+            value: int, channel number.
+
+        """
+        self.frequency = get_frequency_for_channel(value)
+
+    @property
+    def bssid(self) -> str | None:
+        return self._bssid
+
+    @bssid.setter
+    def bssid(self, value: str):
+        self._bssid = value
+
+    @property
+    def frequency(self) -> int:
+        """Returns: frequency for hostapd to listen on."""
+        return self._frequency
+
+    @frequency.setter
+    def frequency(self, value: int):
+        """Sets the frequency for hostapd to listen on.
+
+        Args:
+            value: int, frequency in MHz.
+
+        """
+        if value not in hostapd_constants.CHANNEL_MAP:
+            raise ValueError(f"Tried to set an invalid frequency: {value!r}.")
+
+        self._frequency = value
+
+    @property
+    def bss_lookup(self) -> dict[str, BssSettings]:
+        return self._bss_lookup
+
+    @property
+    def ssid(self) -> str | None:
+        """Returns: SsidSettings, The root Ssid settings being used."""
+        return self._ssid
+
+    @ssid.setter
+    def ssid(self, value: str):
+        """Sets the ssid for the hostapd.
+
+        Args:
+            value: The new ssid to use.
+
+        """
+        self._ssid = value
+
+    @property
+    def hidden(self):
+        """Returns: bool, True if the ssid is hidden, false otherwise."""
+        return self._hidden
+
+    @hidden.setter
+    def hidden(self, value: bool):
+        """Sets if this ssid is hidden.
+
+        Args:
+            value: If true the ssid will be hidden.
+        """
+        self._hidden = value
+
+    @property
+    def security(self) -> Security:
+        """Returns: The security type being used."""
+        return self._security
+
+    @security.setter
+    def security(self, value: Security):
+        """Sets the security options to use.
+
+        Args:
+            value: The type of security to use.
+        """
+        self._security = value
+
+    @property
+    def ht_packet_capture_mode(self) -> str | None:
+        """Get an appropriate packet capture HT parameter.
+
+        When we go to configure a raw monitor we need to configure
+        the phy to listen on the correct channel.  Part of doing
+        so is to specify the channel width for HT channels.  In the
+        case that the AP is configured to be either HT40+ or HT40-,
+        we could return the wrong parameter because we don't know which
+        configuration will be chosen by hostapd.
+
+        Returns:
+            string, HT parameter for frequency configuration.
+
+        """
+        if not self.is_11n:
+            return None
+
+        if ht40_plus_allowed(self.channel):
+            return "HT40+"
+
+        if ht40_minus_allowed(self.channel):
+            return "HT40-"
+
+        return "HT20"
+
+    @property
+    def beacon_footer(self) -> str:
+        return self._beacon_footer
+
+    @beacon_footer.setter
+    def beacon_footer(self, value: str):
+        """Changes the beacon footer.
+
+        Args:
+            value: The beacon footer value.
+        """
+        self._beacon_footer = value
+
+    @property
+    def scenario_name(self) -> str | None:
+        return self._scenario_name
+
+    @property
+    def min_streams(self) -> int | None:
+        return self._min_streams
+
+    @property
+    def wnm_features(self) -> FrozenSet[hostapd_constants.WnmFeature]:
+        return self._wnm_features
+
+    @wnm_features.setter
+    def wnm_features(self, value: FrozenSet[hostapd_constants.WnmFeature]):
+        self._wnm_features = value
+
+    def __repr__(self) -> str:
+        return (
+            "%s(mode=%r, channel=%r, frequency=%r, "
+            "n_capabilities=%r, beacon_interval=%r, "
+            "dtim_period=%r, frag_threshold=%r, ssid=%r, bssid=%r, "
+            "wmm_enabled=%r, security_config=%r, "
+            "spectrum_mgmt_required=%r)"
+            % (
+                self.__class__.__name__,
+                self._mode,
+                self.channel,
+                self.frequency,
+                self._n_capabilities,
+                self._beacon_interval,
+                self._dtim_period,
+                self._frag_threshold,
+                self._ssid,
+                self._bssid,
+                self._wmm_enabled,
+                self._security,
+                self._spectrum_mgmt_required,
+            )
+        )
+
+    def supports_channel(self, value: int) -> bool:
+        """Check whether channel is supported by the current hardware mode.
+
+        @param value: channel to check.
+        @return True iff the current mode supports the band of the channel.
+
+        """
+        for freq, channel in hostapd_constants.CHANNEL_MAP.items():
+            if channel == value:
+                return self.supports_frequency(freq)
+
+        return False
+
+    def supports_frequency(self, frequency: int) -> bool:
+        """Check whether frequency is supported by the current hardware mode.
+
+        @param frequency: frequency to check.
+        @return True iff the current mode supports the band of the frequency.
+
+        """
+        if self._mode == hostapd_constants.MODE_11A and frequency < 5000:
+            return False
+
+        if (
+            self._mode in (hostapd_constants.MODE_11B, hostapd_constants.MODE_11G)
+            and frequency > 5000
+        ):
+            return False
+
+        if frequency not in hostapd_constants.CHANNEL_MAP:
+            return False
+
+        channel = hostapd_constants.CHANNEL_MAP[frequency]
+        supports_plus = (
+            channel
+            in hostapd_constants.HT40_ALLOW_MAP[
+                hostapd_constants.N_CAPABILITY_HT40_PLUS_CHANNELS
+            ]
+        )
+        supports_minus = (
+            channel
+            in hostapd_constants.HT40_ALLOW_MAP[
+                hostapd_constants.N_CAPABILITY_HT40_MINUS_CHANNELS
+            ]
+        )
+        if (
+            hostapd_constants.N_CAPABILITY_HT40_PLUS in self._n_capabilities
+            and not supports_plus
+        ):
+            return False
+
+        if (
+            hostapd_constants.N_CAPABILITY_HT40_MINUS in self._n_capabilities
+            and not supports_minus
+        ):
+            return False
+
+        return True
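+
+    # For example, a MODE_11A config rejects 2437 MHz (a 2.4 GHz frequency), and a
+    # config carrying N_CAPABILITY_HT40_PLUS rejects 2462 MHz (channel 11) because
+    # only HT40- is allowed on that channel.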
+
+    def add_bss(self, bss: BssSettings) -> None:
+        """Adds a new bss setting.
+
+        Args:
+            bss: The bss settings to add.
+        """
+        if bss.name in self._bss_lookup:
+            raise ValueError("A bss with the same name already exists.")
+
+        self._bss_lookup[bss.name] = bss
+
+    def remove_bss(self, bss_name: str) -> None:
+        """Removes a bss setting from the config."""
+        del self._bss_lookup[bss_name]
+
+    def package_configs(self) -> list[dict[str, str | int]]:
+        """Package the configs.
+
+        Returns:
+            A list of dictionaries, one dictionary for each section of the
+            config.
+        """
+        # Start with the default config parameters.
+        conf = self._get_default_config
+
+        if self._interface:
+            conf["interface"] = self._interface
+        if self._bssid:
+            conf["bssid"] = self._bssid
+        if self._ssid:
+            conf["ssid"] = self._ssid
+            conf["ignore_broadcast_ssid"] = 1 if self._hidden else 0
+        conf["channel"] = self.channel
+        conf["hw_mode"] = self.hw_mode
+        if self.is_11n or self.is_11ac:
+            conf["ieee80211n"] = 1
+            conf["ht_capab"] = self._hostapd_ht_capabilities
+        if self.is_11ac:
+            conf["ieee80211ac"] = 1
+            conf["vht_oper_chwidth"] = self._vht_oper_chwidth
+            conf["vht_oper_centr_freq_seg0_idx"] = self._vht_oper_centr_freq_seg0_idx
+            conf["vht_capab"] = self._hostapd_vht_capabilities
+        if self._wmm_enabled is not None:
+            conf["wmm_enabled"] = 1 if self._wmm_enabled else 0
+        if self._require_ht:
+            conf["require_ht"] = 1
+        if self._require_vht:
+            conf["require_vht"] = 1
+        if self._beacon_interval:
+            conf["beacon_int"] = self._beacon_interval
+        if self._dtim_period:
+            conf["dtim_period"] = self._dtim_period
+        if self._frag_threshold:
+            conf["fragm_threshold"] = self._frag_threshold
+        if self._rts_threshold:
+            conf["rts_threshold"] = self._rts_threshold
+        if self._pmf_support:
+            conf["ieee80211w"] = self._pmf_support
+        if self._obss_interval:
+            conf["obss_interval"] = self._obss_interval
+        if self._short_preamble:
+            conf["preamble"] = 1
+        if self._spectrum_mgmt_required:
+            # To set spectrum_mgmt_required, we must first set
+            # local_pwr_constraint. And to set local_pwr_constraint,
+            # we must first set ieee80211d. And to set ieee80211d, ...
+            # Point being: order matters here.
+            conf["country_code"] = "US"  # Required for local_pwr_constraint
+            conf["ieee80211d"] = 1  # Required for local_pwr_constraint
+            conf["local_pwr_constraint"] = 0  # No local constraint
+            conf["spectrum_mgmt_required"] = 1  # Requires local_pwr_constraint
+
+        for k, v in self._security.generate_dict().items():
+            conf[k] = v
+
+        all_conf = [conf]
+
+        for bss in self._bss_lookup.values():
+            bss_conf = collections.OrderedDict()
+            for k, v in (bss.generate_dict()).items():
+                bss_conf[k] = v
+            all_conf.append(bss_conf)
+
+        for wnm_feature in self._wnm_features:
+            if wnm_feature == hostapd_constants.WnmFeature.TIME_ADVERTISEMENT:
+                conf.update(hostapd_constants.ENABLE_WNM_TIME_ADVERTISEMENT)
+            elif wnm_feature == hostapd_constants.WnmFeature.WNM_SLEEP_MODE:
+                conf.update(hostapd_constants.ENABLE_WNM_SLEEP_MODE)
+            elif wnm_feature == hostapd_constants.WnmFeature.BSS_TRANSITION_MANAGEMENT:
+                conf.update(hostapd_constants.ENABLE_WNM_BSS_TRANSITION_MANAGEMENT)
+            elif wnm_feature == hostapd_constants.WnmFeature.PROXY_ARP:
+                conf.update(hostapd_constants.ENABLE_WNM_PROXY_ARP)
+            elif (
+                wnm_feature
+                == hostapd_constants.WnmFeature.IPV6_NEIGHBOR_ADVERTISEMENT_MULTICAST_TO_UNICAST
+            ):
+                conf.update(
+                    hostapd_constants.ENABLE_WNM_IPV6_NEIGHBOR_ADVERTISEMENT_MULTICAST_TO_UNICAST
+                )
+
+        if self._additional_parameters:
+            all_conf.append(self._additional_parameters)
+
+        return all_conf
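+
+
+# A minimal usage sketch (illustrative only; interface and SSID values are
+# hypothetical): build an 802.11n config on channel 6 and render it into the
+# per-section dictionaries handed to hostapd.
+#
+#     config = HostapdConfig(
+#         interface="wlan0",
+#         mode=hostapd_constants.MODE_11N_MIXED,
+#         channel=6,
+#         ssid="example-ssid",
+#         n_capabilities=[hostapd_constants.N_CAPABILITY_HT20],
+#     )
+#     sections = config.package_configs()
+#     # sections[0] holds the main BSS (interface, channel, hw_mode, ht_capab,
+#     # ...); any BssSettings added via add_bss() follow as extra dictionaries.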
diff --git a/packages/antlion/controllers/ap_lib/hostapd_constants.py b/packages/antlion/controllers/ap_lib/hostapd_constants.py
new file mode 100755
index 0000000..ea6fdb2
--- /dev/null
+++ b/packages/antlion/controllers/ap_lib/hostapd_constants.py
@@ -0,0 +1,938 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import itertools
+from enum import Enum, StrEnum, auto, unique
+from typing import TypedDict
+
+# TODO(http://b/286584981): Replace with BandType
+BAND_2G = "2g"
+BAND_5G = "5g"
+
+
+@unique
+class BandType(StrEnum):
+    BAND_2G = "2g"
+    BAND_5G = "5g"
+
+    def default_channel(self) -> int:
+        match self:
+            case BandType.BAND_2G:
+                return 6
+            case BandType.BAND_5G:
+                return 36
+
+
+CHANNEL_BANDWIDTH_20MHZ = 20
+CHANNEL_BANDWIDTH_40MHZ = 40
+CHANNEL_BANDWIDTH_80MHZ = 80
+CHANNEL_BANDWIDTH_160MHZ = 160
+
+# TODO(http://b/286584981): Replace with SecurityModeInt
+WEP = 0
+WPA1 = 1
+WPA2 = 2
+WPA3 = 2  # same as wpa2 and wpa2/wpa3, distinguished by wpa_key_mgmt
+MIXED = 3  # applies to wpa/wpa2, and wpa/wpa2/wpa3, distinguished by wpa_key_mgmt
+ENT = 4  # get the correct constant
+
+MAX_WPA_PSK_LENGTH = 64
+MIN_WPA_PSK_LENGTH = 8
+MAX_WPA_PASSWORD_LENGTH = 63
+WPA_STRICT_REKEY = 1
+WPA_DEFAULT_CIPHER = "TKIP"
+WPA2_DEFAULT_CIPER = "CCMP"
+WPA_GROUP_KEY_ROTATION_TIME = 600
+WPA_STRICT_REKEY_DEFAULT = True
+
+# TODO(http://b/286584981): Replace these with SecurityMode enum
+WEP_STRING = "wep"
+WPA_STRING = "wpa"
+WPA2_STRING = "wpa2"
+WPA_MIXED_STRING = "wpa/wpa2"
+WPA3_STRING = "wpa3"
+WPA2_WPA3_MIXED_STRING = "wpa2/wpa3"
+WPA_WPA2_WPA3_MIXED_STRING = "wpa/wpa2/wpa3"
+ENT_STRING = "ent"
+
+# TODO(http://b/286584981): Replace with KeyManagement
+ENT_KEY_MGMT = "WPA-EAP"
+WPA_PSK_KEY_MGMT = "WPA-PSK"
+SAE_KEY_MGMT = "SAE"
+DUAL_WPA_PSK_SAE_KEY_MGMT = "WPA-PSK SAE"
+
+# TODO(http://b/286584981): Replace with SecurityMode.security_mode_int
+SECURITY_STRING_TO_SECURITY_MODE_INT = {
+    WPA_STRING: WPA1,
+    WPA2_STRING: WPA2,
+    WPA_MIXED_STRING: MIXED,
+    WPA3_STRING: WPA3,
+    WPA2_WPA3_MIXED_STRING: WPA3,
+    WPA_WPA2_WPA3_MIXED_STRING: MIXED,
+    WEP_STRING: WEP,
+    ENT_STRING: ENT,
+}
+
+# TODO(http://b/286584981): Replace with SecurityMode.key_management
+SECURITY_STRING_TO_WPA_KEY_MGMT = {
+    WPA_STRING: WPA_PSK_KEY_MGMT,
+    WPA2_STRING: WPA_PSK_KEY_MGMT,
+    WPA_MIXED_STRING: WPA_PSK_KEY_MGMT,
+    WPA3_STRING: SAE_KEY_MGMT,
+    WPA2_WPA3_MIXED_STRING: DUAL_WPA_PSK_SAE_KEY_MGMT,
+    WPA_WPA2_WPA3_MIXED_STRING: DUAL_WPA_PSK_SAE_KEY_MGMT,
+}
+
+# TODO(http://b/286584981): Replace with SecurityMode.fuchsia_security_type
+SECURITY_STRING_TO_DEFAULT_TARGET_SECURITY = {
+    WEP_STRING: WEP_STRING,
+    WPA_STRING: WPA_STRING,
+    WPA2_STRING: WPA2_STRING,
+    WPA_MIXED_STRING: WPA2_STRING,
+    WPA3_STRING: WPA3_STRING,
+    WPA2_WPA3_MIXED_STRING: WPA3_STRING,
+    WPA_WPA2_WPA3_MIXED_STRING: WPA3_STRING,
+}
+
+IEEE8021X = 1
+WLAN0_STRING = "wlan0"
+WLAN1_STRING = "wlan1"
+WLAN2_STRING = "wlan2"
+WLAN3_STRING = "wlan3"
+WLAN0_GALE = "wlan-2400mhz"
+WLAN1_GALE = "wlan-5000mhz"
+WEP_DEFAULT_KEY = 0
+WEP_HEX_LENGTH = [10, 26, 32, 58]
+WEP_STR_LENGTH = [5, 13, 16]
+WEP_DEFAULT_STR_LENGTH = 13
+
+# TODO(http://b/286584981): Replace with BandType.default_channel()
+AP_DEFAULT_CHANNEL_2G = 6
+AP_DEFAULT_CHANNEL_5G = 36
+
+AP_DEFAULT_MAX_SSIDS_2G = 8
+AP_DEFAULT_MAX_SSIDS_5G = 8
+AP_SSID_LENGTH_2G = 8
+AP_SSID_MIN_LENGTH_2G = 1
+AP_SSID_MAX_LENGTH_2G = 32
+AP_PASSPHRASE_LENGTH_2G = 10
+AP_SSID_LENGTH_5G = 8
+AP_SSID_MIN_LENGTH_5G = 1
+AP_SSID_MAX_LENGTH_5G = 32
+AP_PASSPHRASE_LENGTH_5G = 10
+INTERFACE_2G_LIST = [WLAN0_STRING, WLAN0_GALE]
+INTERFACE_5G_LIST = [WLAN1_STRING, WLAN1_GALE]
+HIGH_BEACON_INTERVAL = 300
+LOW_BEACON_INTERVAL = 100
+HIGH_DTIM = 3
+LOW_DTIM = 1
+
+# A mapping of frequency to channel number.  This includes some
+# frequencies used outside the US.
+CHANNEL_MAP = {
+    2412: 1,
+    2417: 2,
+    2422: 3,
+    2427: 4,
+    2432: 5,
+    2437: 6,
+    2442: 7,
+    2447: 8,
+    2452: 9,
+    2457: 10,
+    2462: 11,
+    # 12, 13 are only legitimate outside the US.
+    2467: 12,
+    2472: 13,
+    # 14 is for Japan, DSSS and CCK only.
+    2484: 14,
+    # 34 valid in Japan.
+    5170: 34,
+    # 36-116 valid in the US, except 38, 42, and 46, which have
+    # mixed international support.
+    5180: 36,
+    5190: 38,
+    5200: 40,
+    5210: 42,
+    5220: 44,
+    5230: 46,
+    5240: 48,
+    # DFS channels.
+    5260: 52,
+    5280: 56,
+    5300: 60,
+    5320: 64,
+    5500: 100,
+    5520: 104,
+    5540: 108,
+    5560: 112,
+    5580: 116,
+    # 120, 124, 128 valid in Europe/Japan.
+    5600: 120,
+    5620: 124,
+    5640: 128,
+    # 132+ valid in US.
+    5660: 132,
+    5680: 136,
+    5700: 140,
+    # 144 is supported by a subset of WiFi chips
+    # (e.g. bcm4354, but not ath9k).
+    5720: 144,
+    # End DFS channels.
+    5745: 149,
+    5755: 151,
+    5765: 153,
+    5775: 155,
+    5785: 157,
+    5795: 159,
+    5805: 161,
+    5825: 165,
+}
+FREQUENCY_MAP = {v: k for k, v in CHANNEL_MAP.items()}
+
+US_CHANNELS_2G = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]
+US_CHANNELS_5G = [
+    36,
+    40,
+    44,
+    48,
+    52,
+    56,
+    60,
+    64,
+    100,
+    104,
+    108,
+    112,
+    116,
+    120,
+    124,
+    128,
+    132,
+    136,
+    140,
+    144,
+    149,
+    153,
+    157,
+    161,
+    165,
+]
+
+LOWEST_5G_CHANNEL = 36
+
+MODE_11A = "a"
+MODE_11B = "b"
+MODE_11G = "g"
+MODE_11N_MIXED = "n-mixed"
+MODE_11N_PURE = "n-only"
+MODE_11AC_MIXED = "ac-mixed"
+MODE_11AC_PURE = "ac-only"
+
+N_CAPABILITY_LDPC = object()
+N_CAPABILITY_HT20 = object()
+N_CAPABILITY_HT40_PLUS = object()
+N_CAPABILITY_HT40_MINUS = object()
+N_CAPABILITY_GREENFIELD = object()
+N_CAPABILITY_SGI20 = object()
+N_CAPABILITY_SGI40 = object()
+N_CAPABILITY_TX_STBC = object()
+N_CAPABILITY_RX_STBC1 = object()
+N_CAPABILITY_RX_STBC12 = object()
+N_CAPABILITY_RX_STBC123 = object()
+N_CAPABILITY_DSSS_CCK_40 = object()
+N_CAPABILITY_LSIG_TXOP_PROT = object()
+N_CAPABILITY_40_INTOLERANT = object()
+N_CAPABILITY_MAX_AMSDU_7935 = object()
+N_CAPABILITY_DELAY_BLOCK_ACK = object()
+N_CAPABILITY_SMPS_STATIC = object()
+N_CAPABILITY_SMPS_DYNAMIC = object()
+N_CAPABILITIES_MAPPING = {
+    N_CAPABILITY_LDPC: "[LDPC]",
+    N_CAPABILITY_HT20: "[HT20]",
+    N_CAPABILITY_HT40_PLUS: "[HT40+]",
+    N_CAPABILITY_HT40_MINUS: "[HT40-]",
+    N_CAPABILITY_GREENFIELD: "[GF]",
+    N_CAPABILITY_SGI20: "[SHORT-GI-20]",
+    N_CAPABILITY_SGI40: "[SHORT-GI-40]",
+    N_CAPABILITY_TX_STBC: "[TX-STBC]",
+    N_CAPABILITY_RX_STBC1: "[RX-STBC1]",
+    N_CAPABILITY_RX_STBC12: "[RX-STBC12]",
+    N_CAPABILITY_RX_STBC123: "[RX-STBC123]",
+    N_CAPABILITY_DSSS_CCK_40: "[DSSS_CCK-40]",
+    N_CAPABILITY_LSIG_TXOP_PROT: "[LSIG-TXOP-PROT]",
+    N_CAPABILITY_40_INTOLERANT: "[40-INTOLERANT]",
+    N_CAPABILITY_MAX_AMSDU_7935: "[MAX-AMSDU-7935]",
+    N_CAPABILITY_DELAY_BLOCK_ACK: "[DELAYED-BA]",
+    N_CAPABILITY_SMPS_STATIC: "[SMPS-STATIC]",
+    N_CAPABILITY_SMPS_DYNAMIC: "[SMPS-DYNAMIC]",
+}
+N_CAPABILITIES_MAPPING_INVERSE = {v: k for k, v in N_CAPABILITIES_MAPPING.items()}
+N_CAPABILITY_HT40_MINUS_CHANNELS = object()
+N_CAPABILITY_HT40_PLUS_CHANNELS = object()
+AC_CAPABILITY_VHT160 = object()
+AC_CAPABILITY_VHT160_80PLUS80 = object()
+AC_CAPABILITY_RXLDPC = object()
+AC_CAPABILITY_SHORT_GI_80 = object()
+AC_CAPABILITY_SHORT_GI_160 = object()
+AC_CAPABILITY_TX_STBC_2BY1 = object()
+AC_CAPABILITY_RX_STBC_1 = object()
+AC_CAPABILITY_RX_STBC_12 = object()
+AC_CAPABILITY_RX_STBC_123 = object()
+AC_CAPABILITY_RX_STBC_1234 = object()
+AC_CAPABILITY_SU_BEAMFORMER = object()
+AC_CAPABILITY_SU_BEAMFORMEE = object()
+AC_CAPABILITY_BF_ANTENNA_2 = object()
+AC_CAPABILITY_BF_ANTENNA_3 = object()
+AC_CAPABILITY_BF_ANTENNA_4 = object()
+AC_CAPABILITY_SOUNDING_DIMENSION_2 = object()
+AC_CAPABILITY_SOUNDING_DIMENSION_3 = object()
+AC_CAPABILITY_SOUNDING_DIMENSION_4 = object()
+AC_CAPABILITY_MU_BEAMFORMER = object()
+AC_CAPABILITY_MU_BEAMFORMEE = object()
+AC_CAPABILITY_VHT_TXOP_PS = object()
+AC_CAPABILITY_HTC_VHT = object()
+AC_CAPABILITY_MAX_A_MPDU_LEN_EXP0 = object()
+AC_CAPABILITY_MAX_A_MPDU_LEN_EXP1 = object()
+AC_CAPABILITY_MAX_A_MPDU_LEN_EXP2 = object()
+AC_CAPABILITY_MAX_A_MPDU_LEN_EXP3 = object()
+AC_CAPABILITY_MAX_A_MPDU_LEN_EXP4 = object()
+AC_CAPABILITY_MAX_A_MPDU_LEN_EXP5 = object()
+AC_CAPABILITY_MAX_A_MPDU_LEN_EXP6 = object()
+AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7 = object()
+AC_CAPABILITY_VHT_LINK_ADAPT2 = object()
+AC_CAPABILITY_VHT_LINK_ADAPT3 = object()
+AC_CAPABILITY_RX_ANTENNA_PATTERN = object()
+AC_CAPABILITY_TX_ANTENNA_PATTERN = object()
+AC_CAPABILITY_MAX_MPDU_7991 = object()
+AC_CAPABILITY_MAX_MPDU_11454 = object()
+AC_CAPABILITIES_MAPPING = {
+    AC_CAPABILITY_VHT160: "[VHT160]",
+    AC_CAPABILITY_VHT160_80PLUS80: "[VHT160-80PLUS80]",
+    AC_CAPABILITY_RXLDPC: "[RXLDPC]",
+    AC_CAPABILITY_SHORT_GI_80: "[SHORT-GI-80]",
+    AC_CAPABILITY_SHORT_GI_160: "[SHORT-GI-160]",
+    AC_CAPABILITY_TX_STBC_2BY1: "[TX-STBC-2BY1]",
+    AC_CAPABILITY_RX_STBC_1: "[RX-STBC-1]",
+    AC_CAPABILITY_RX_STBC_12: "[RX-STBC-12]",
+    AC_CAPABILITY_RX_STBC_123: "[RX-STBC-123]",
+    AC_CAPABILITY_RX_STBC_1234: "[RX-STBC-1234]",
+    AC_CAPABILITY_SU_BEAMFORMER: "[SU-BEAMFORMER]",
+    AC_CAPABILITY_SU_BEAMFORMEE: "[SU-BEAMFORMEE]",
+    AC_CAPABILITY_BF_ANTENNA_2: "[BF-ANTENNA-2]",
+    AC_CAPABILITY_BF_ANTENNA_3: "[BF-ANTENNA-3]",
+    AC_CAPABILITY_BF_ANTENNA_4: "[BF-ANTENNA-4]",
+    AC_CAPABILITY_SOUNDING_DIMENSION_2: "[SOUNDING-DIMENSION-2]",
+    AC_CAPABILITY_SOUNDING_DIMENSION_3: "[SOUNDING-DIMENSION-3]",
+    AC_CAPABILITY_SOUNDING_DIMENSION_4: "[SOUNDING-DIMENSION-4]",
+    AC_CAPABILITY_MU_BEAMFORMER: "[MU-BEAMFORMER]",
+    AC_CAPABILITY_MU_BEAMFORMEE: "[MU-BEAMFORMEE]",
+    AC_CAPABILITY_VHT_TXOP_PS: "[VHT-TXOP-PS]",
+    AC_CAPABILITY_HTC_VHT: "[HTC-VHT]",
+    AC_CAPABILITY_MAX_A_MPDU_LEN_EXP0: "[MAX-A-MPDU-LEN-EXP0]",
+    AC_CAPABILITY_MAX_A_MPDU_LEN_EXP1: "[MAX-A-MPDU-LEN-EXP1]",
+    AC_CAPABILITY_MAX_A_MPDU_LEN_EXP2: "[MAX-A-MPDU-LEN-EXP2]",
+    AC_CAPABILITY_MAX_A_MPDU_LEN_EXP3: "[MAX-A-MPDU-LEN-EXP3]",
+    AC_CAPABILITY_MAX_A_MPDU_LEN_EXP4: "[MAX-A-MPDU-LEN-EXP4]",
+    AC_CAPABILITY_MAX_A_MPDU_LEN_EXP5: "[MAX-A-MPDU-LEN-EXP5]",
+    AC_CAPABILITY_MAX_A_MPDU_LEN_EXP6: "[MAX-A-MPDU-LEN-EXP6]",
+    AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7: "[MAX-A-MPDU-LEN-EXP7]",
+    AC_CAPABILITY_VHT_LINK_ADAPT2: "[VHT-LINK-ADAPT2]",
+    AC_CAPABILITY_VHT_LINK_ADAPT3: "[VHT-LINK-ADAPT3]",
+    AC_CAPABILITY_RX_ANTENNA_PATTERN: "[RX-ANTENNA-PATTERN]",
+    AC_CAPABILITY_TX_ANTENNA_PATTERN: "[TX-ANTENNA-PATTERN]",
+    AC_CAPABILITY_MAX_MPDU_11454: "[MAX-MPDU-11454]",
+    AC_CAPABILITY_MAX_MPDU_7991: "[MAX-MPDU-7991]",
+}
+AC_CAPABILITIES_MAPPING_INVERSE = {v: k for k, v in AC_CAPABILITIES_MAPPING.items()}
+VHT_CHANNEL_WIDTH_40 = 0
+VHT_CHANNEL_WIDTH_80 = 1
+VHT_CHANNEL_WIDTH_160 = 2
+VHT_CHANNEL_WIDTH_80_80 = 3
+
+VHT_CHANNEL = {
+    40: VHT_CHANNEL_WIDTH_40,
+    80: VHT_CHANNEL_WIDTH_80,
+    160: VHT_CHANNEL_WIDTH_160,
+}
+
+# This is a loose merging of the rules for US and EU regulatory
+# domains as taken from IEEE Std 802.11-2012 Appendix E.  For instance,
+# we tolerate HT40 in channels 149-161 (not allowed in EU), but also
+# tolerate HT40+ on channel 7 (not allowed in the US).  We take the loose
+# definition so that we don't prohibit testing in either domain.
+HT40_ALLOW_MAP = {
+    N_CAPABILITY_HT40_MINUS_CHANNELS: tuple(
+        itertools.chain(range(6, 14), range(40, 65, 8), range(104, 145, 8), [153, 161])
+    ),
+    N_CAPABILITY_HT40_PLUS_CHANNELS: tuple(
+        itertools.chain(range(1, 8), range(36, 61, 8), range(100, 141, 8), [149, 157])
+    ),
+}
+
+PMF_SUPPORT_DISABLED = 0
+PMF_SUPPORT_ENABLED = 1
+PMF_SUPPORT_REQUIRED = 2
+PMF_SUPPORT_VALUES = (PMF_SUPPORT_DISABLED, PMF_SUPPORT_ENABLED, PMF_SUPPORT_REQUIRED)
+
+DRIVER_NAME = "nl80211"
+
+
+class VHTChannelWidth(TypedDict):
+    delta: int
+    channels: list[tuple[int, int]]
+
+
+CENTER_CHANNEL_MAP = {
+    VHT_CHANNEL_WIDTH_40: VHTChannelWidth(
+        delta=2,
+        channels=[
+            (36, 40),
+            (44, 48),
+            (52, 56),
+            (60, 64),
+            (100, 104),
+            (108, 112),
+            (116, 120),
+            (124, 128),
+            (132, 136),
+            (140, 144),
+            (149, 153),
+            (157, 161),
+        ],
+    ),
+    VHT_CHANNEL_WIDTH_80: VHTChannelWidth(
+        delta=6,
+        channels=[
+            (36, 48),
+            (52, 64),
+            (100, 112),
+            (116, 128),
+            (132, 144),
+            (149, 161),
+        ],
+    ),
+    VHT_CHANNEL_WIDTH_160: VHTChannelWidth(
+        delta=14,
+        channels=[(36, 64), (100, 128)],
+    ),
+}
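+
+# For example, an 80 MHz BSS whose primary channel is 149 falls in the (149, 161)
+# block above, so its center channel index is 149 + 6 = 155.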
+
+OFDM_DATA_RATES = {"supported_rates": "60 90 120 180 240 360 480 540"}
+
+CCK_DATA_RATES = {"supported_rates": "10 20 55 110"}
+
+CCK_AND_OFDM_DATA_RATES = {
+    "supported_rates": "10 20 55 110 60 90 120 180 240 360 480 540"
+}
+
+OFDM_ONLY_BASIC_RATES = {"basic_rates": "60 120 240"}
+
+CCK_AND_OFDM_BASIC_RATES = {"basic_rates": "10 20 55 110"}
+
+WEP_AUTH = {
+    "open": {"auth_algs": 1},
+    "shared": {"auth_algs": 2},
+    "open_and_shared": {"auth_algs": 3},
+}
+
+WMM_11B_DEFAULT_PARAMS = {
+    "wmm_ac_bk_cwmin": 5,
+    "wmm_ac_bk_cwmax": 10,
+    "wmm_ac_bk_aifs": 7,
+    "wmm_ac_bk_txop_limit": 0,
+    "wmm_ac_be_aifs": 3,
+    "wmm_ac_be_cwmin": 5,
+    "wmm_ac_be_cwmax": 7,
+    "wmm_ac_be_txop_limit": 0,
+    "wmm_ac_vi_aifs": 2,
+    "wmm_ac_vi_cwmin": 4,
+    "wmm_ac_vi_cwmax": 5,
+    "wmm_ac_vi_txop_limit": 188,
+    "wmm_ac_vo_aifs": 2,
+    "wmm_ac_vo_cwmin": 3,
+    "wmm_ac_vo_cwmax": 4,
+    "wmm_ac_vo_txop_limit": 102,
+}
+
+WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS = {
+    "wmm_ac_bk_cwmin": 4,
+    "wmm_ac_bk_cwmax": 10,
+    "wmm_ac_bk_aifs": 7,
+    "wmm_ac_bk_txop_limit": 0,
+    "wmm_ac_be_aifs": 3,
+    "wmm_ac_be_cwmin": 4,
+    "wmm_ac_be_cwmax": 10,
+    "wmm_ac_be_txop_limit": 0,
+    "wmm_ac_vi_aifs": 2,
+    "wmm_ac_vi_cwmin": 3,
+    "wmm_ac_vi_cwmax": 4,
+    "wmm_ac_vi_txop_limit": 94,
+    "wmm_ac_vo_aifs": 2,
+    "wmm_ac_vo_cwmin": 2,
+    "wmm_ac_vo_cwmax": 3,
+    "wmm_ac_vo_txop_limit": 47,
+}
+
+WMM_NON_DEFAULT_PARAMS = {
+    "wmm_ac_bk_cwmin": 5,
+    "wmm_ac_bk_cwmax": 9,
+    "wmm_ac_bk_aifs": 3,
+    "wmm_ac_bk_txop_limit": 94,
+    "wmm_ac_be_aifs": 2,
+    "wmm_ac_be_cwmin": 2,
+    "wmm_ac_be_cwmax": 8,
+    "wmm_ac_be_txop_limit": 0,
+    "wmm_ac_vi_aifs": 1,
+    "wmm_ac_vi_cwmin": 7,
+    "wmm_ac_vi_cwmax": 10,
+    "wmm_ac_vi_txop_limit": 47,
+    "wmm_ac_vo_aifs": 1,
+    "wmm_ac_vo_cwmin": 6,
+    "wmm_ac_vo_cwmax": 10,
+    "wmm_ac_vo_txop_limit": 94,
+}
+
+WMM_DEGRADED_VO_PARAMS = {
+    "wmm_ac_bk_cwmin": 7,
+    "wmm_ac_bk_cwmax": 15,
+    "wmm_ac_bk_aifs": 2,
+    "wmm_ac_bk_txop_limit": 0,
+    "wmm_ac_be_aifs": 2,
+    "wmm_ac_be_cwmin": 7,
+    "wmm_ac_be_cwmax": 15,
+    "wmm_ac_be_txop_limit": 0,
+    "wmm_ac_vi_aifs": 2,
+    "wmm_ac_vi_cwmin": 7,
+    "wmm_ac_vi_cwmax": 15,
+    "wmm_ac_vi_txop_limit": 94,
+    "wmm_ac_vo_aifs": 10,
+    "wmm_ac_vo_cwmin": 7,
+    "wmm_ac_vo_cwmax": 15,
+    "wmm_ac_vo_txop_limit": 47,
+}
+
+WMM_DEGRADED_VI_PARAMS = {
+    "wmm_ac_bk_cwmin": 7,
+    "wmm_ac_bk_cwmax": 15,
+    "wmm_ac_bk_aifs": 2,
+    "wmm_ac_bk_txop_limit": 0,
+    "wmm_ac_be_aifs": 2,
+    "wmm_ac_be_cwmin": 7,
+    "wmm_ac_be_cwmax": 15,
+    "wmm_ac_be_txop_limit": 0,
+    "wmm_ac_vi_aifs": 10,
+    "wmm_ac_vi_cwmin": 7,
+    "wmm_ac_vi_cwmax": 15,
+    "wmm_ac_vi_txop_limit": 94,
+    "wmm_ac_vo_aifs": 2,
+    "wmm_ac_vo_cwmin": 7,
+    "wmm_ac_vo_cwmax": 15,
+    "wmm_ac_vo_txop_limit": 47,
+}
+
+WMM_IMPROVE_BE_PARAMS = {
+    "wmm_ac_bk_cwmin": 7,
+    "wmm_ac_bk_cwmax": 15,
+    "wmm_ac_bk_aifs": 10,
+    "wmm_ac_bk_txop_limit": 0,
+    "wmm_ac_be_aifs": 2,
+    "wmm_ac_be_cwmin": 7,
+    "wmm_ac_be_cwmax": 15,
+    "wmm_ac_be_txop_limit": 0,
+    "wmm_ac_vi_aifs": 10,
+    "wmm_ac_vi_cwmin": 7,
+    "wmm_ac_vi_cwmax": 15,
+    "wmm_ac_vi_txop_limit": 94,
+    "wmm_ac_vo_aifs": 10,
+    "wmm_ac_vo_cwmin": 7,
+    "wmm_ac_vo_cwmax": 15,
+    "wmm_ac_vo_txop_limit": 47,
+}
+
+WMM_IMPROVE_BK_PARAMS = {
+    "wmm_ac_bk_cwmin": 7,
+    "wmm_ac_bk_cwmax": 15,
+    "wmm_ac_bk_aifs": 2,
+    "wmm_ac_bk_txop_limit": 0,
+    "wmm_ac_be_aifs": 10,
+    "wmm_ac_be_cwmin": 7,
+    "wmm_ac_be_cwmax": 15,
+    "wmm_ac_be_txop_limit": 0,
+    "wmm_ac_vi_aifs": 10,
+    "wmm_ac_vi_cwmin": 7,
+    "wmm_ac_vi_cwmax": 15,
+    "wmm_ac_vi_txop_limit": 94,
+    "wmm_ac_vo_aifs": 10,
+    "wmm_ac_vo_cwmin": 7,
+    "wmm_ac_vo_cwmax": 15,
+    "wmm_ac_vo_txop_limit": 47,
+}
+
+WMM_ACM_BK = {"wmm_ac_bk_acm": 1}
+WMM_ACM_BE = {"wmm_ac_be_acm": 1}
+WMM_ACM_VI = {"wmm_ac_vi_acm": 1}
+WMM_ACM_VO = {"wmm_ac_vo_acm": 1}
+
+UAPSD_ENABLED = {"uapsd_advertisement_enabled": 1}
+
+UTF_8_SSID = {"utf8_ssid": 1}
+
+ENABLE_RRM_BEACON_REPORT = {"rrm_beacon_report": 1}
+ENABLE_RRM_NEIGHBOR_REPORT = {"rrm_neighbor_report": 1}
+
+# Wireless Network Management (AKA 802.11v) features.
+ENABLE_WNM_TIME_ADVERTISEMENT = {"time_advertisement": 2, "time_zone": "EST5"}
+ENABLE_WNM_SLEEP_MODE = {"wnm_sleep_mode": 1}
+ENABLE_WNM_BSS_TRANSITION_MANAGEMENT = {"bss_transition": 1}
+ENABLE_WNM_PROXY_ARP = {"proxy_arp": 1}
+ENABLE_WNM_IPV6_NEIGHBOR_ADVERTISEMENT_MULTICAST_TO_UNICAST = {"na_mcast_to_ucast": 1}
+
+VENDOR_IE = {
+    "correct_length_beacon": {"vendor_elements": "dd0411223301"},
+    "too_short_length_beacon": {"vendor_elements": "dd0311223301"},
+    "too_long_length_beacon": {"vendor_elements": "dd0511223301"},
+    "zero_length_beacon_with_data": {"vendor_elements": "dd0011223301"},
+    "zero_length_beacon_without_data": {"vendor_elements": "dd00"},
+    "simliar_to_wpa": {"vendor_elements": "dd040050f203"},
+    "correct_length_association_response": {"assocresp_elements": "dd0411223301"},
+    "too_short_length_association_response": {"assocresp_elements": "dd0311223301"},
+    "too_long_length_association_response": {"assocresp_elements": "dd0511223301"},
+    "zero_length_association_response_with_data": {
+        "assocresp_elements": "dd0011223301"
+    },
+    "zero_length_association_response_without_data": {"assocresp_elements": "dd00"},
+}
+
+ENABLE_IEEE80211D = {"ieee80211d": 1}
+
+COUNTRY_STRING = {
+    "ALL": {"country3": "0x20"},
+    "OUTDOOR": {"country3": "0x4f"},
+    "INDOOR": {"country3": "0x49"},
+    "NONCOUNTRY": {"country3": "0x58"},
+    "GLOBAL": {"country3": "0x04"},
+}
+
+COUNTRY_CODE = {
+    "AFGHANISTAN": {"country_code": "AF"},
+    "ALAND_ISLANDS": {"country_code": "AX"},
+    "ALBANIA": {"country_code": "AL"},
+    "ALGERIA": {"country_code": "DZ"},
+    "AMERICAN_SAMOA": {"country_code": "AS"},
+    "ANDORRA": {"country_code": "AD"},
+    "ANGOLA": {"country_code": "AO"},
+    "ANGUILLA": {"country_code": "AI"},
+    "ANTARCTICA": {"country_code": "AQ"},
+    "ANTIGUA_AND_BARBUDA": {"country_code": "AG"},
+    "ARGENTINA": {"country_code": "AR"},
+    "ARMENIA": {"country_code": "AM"},
+    "ARUBA": {"country_code": "AW"},
+    "AUSTRALIA": {"country_code": "AU"},
+    "AUSTRIA": {"country_code": "AT"},
+    "AZERBAIJAN": {"country_code": "AZ"},
+    "BAHAMAS": {"country_code": "BS"},
+    "BAHRAIN": {"country_code": "BH"},
+    "BANGLADESH": {"country_code": "BD"},
+    "BARBADOS": {"country_code": "BB"},
+    "BELARUS": {"country_code": "BY"},
+    "BELGIUM": {"country_code": "BE"},
+    "BELIZE": {"country_code": "BZ"},
+    "BENIN": {"country_code": "BJ"},
+    "BERMUDA": {"country_code": "BM"},
+    "BHUTAN": {"country_code": "BT"},
+    "BOLIVIA": {"country_code": "BO"},
+    "BONAIRE": {"country_code": "BQ"},
+    "BOSNIA_AND_HERZEGOVINA": {"country_code": "BA"},
+    "BOTSWANA": {"country_code": "BW"},
+    "BOUVET_ISLAND": {"country_code": "BV"},
+    "BRAZIL": {"country_code": "BR"},
+    "BRITISH_INDIAN_OCEAN_TERRITORY": {"country_code": "IO"},
+    "BRUNEI_DARUSSALAM": {"country_code": "BN"},
+    "BULGARIA": {"country_code": "BG"},
+    "BURKINA_FASO": {"country_code": "BF"},
+    "BURUNDI": {"country_code": "BI"},
+    "CAMBODIA": {"country_code": "KH"},
+    "CAMEROON": {"country_code": "CM"},
+    "CANADA": {"country_code": "CA"},
+    "CAPE_VERDE": {"country_code": "CV"},
+    "CAYMAN_ISLANDS": {"country_code": "KY"},
+    "CENTRAL_AFRICAN_REPUBLIC": {"country_code": "CF"},
+    "CHAD": {"country_code": "TD"},
+    "CHILE": {"country_code": "CL"},
+    "CHINA": {"country_code": "CN"},
+    "CHRISTMAS_ISLAND": {"country_code": "CX"},
+    "COCOS_ISLANDS": {"country_code": "CC"},
+    "COLOMBIA": {"country_code": "CO"},
+    "COMOROS": {"country_code": "KM"},
+    "CONGO": {"country_code": "CG"},
+    "DEMOCRATIC_REPUBLIC_CONGO": {"country_code": "CD"},
+    "COOK_ISLANDS": {"country_code": "CK"},
+    "COSTA_RICA": {"country_code": "CR"},
+    "COTE_D_IVOIRE": {"country_code": "CI"},
+    "CROATIA": {"country_code": "HR"},
+    "CUBA": {"country_code": "CU"},
+    "CURACAO": {"country_code": "CW"},
+    "CYPRUS": {"country_code": "CY"},
+    "CZECH_REPUBLIC": {"country_code": "CZ"},
+    "DENMARK": {"country_code": "DK"},
+    "DJIBOUTI": {"country_code": "DJ"},
+    "DOMINICA": {"country_code": "DM"},
+    "DOMINICAN_REPUBLIC": {"country_code": "DO"},
+    "ECUADOR": {"country_code": "EC"},
+    "EGYPT": {"country_code": "EG"},
+    "EL_SALVADOR": {"country_code": "SV"},
+    "EQUATORIAL_GUINEA": {"country_code": "GQ"},
+    "ERITREA": {"country_code": "ER"},
+    "ESTONIA": {"country_code": "EE"},
+    "ETHIOPIA": {"country_code": "ET"},
+    "FALKLAND_ISLANDS_(MALVINAS)": {"country_code": "FK"},
+    "FAROE_ISLANDS": {"country_code": "FO"},
+    "FIJI": {"country_code": "FJ"},
+    "FINLAND": {"country_code": "FI"},
+    "FRANCE": {"country_code": "FR"},
+    "FRENCH_GUIANA": {"country_code": "GF"},
+    "FRENCH_POLYNESIA": {"country_code": "PF"},
+    "FRENCH_SOUTHERN_TERRITORIES": {"country_code": "TF"},
+    "GABON": {"country_code": "GA"},
+    "GAMBIA": {"country_code": "GM"},
+    "GEORGIA": {"country_code": "GE"},
+    "GERMANY": {"country_code": "DE"},
+    "GHANA": {"country_code": "GH"},
+    "GIBRALTAR": {"country_code": "GI"},
+    "GREECE": {"country_code": "GR"},
+    "GREENLAND": {"country_code": "GL"},
+    "GRENADA": {"country_code": "GD"},
+    "GUADELOUPE": {"country_code": "GP"},
+    "GUAM": {"country_code": "GU"},
+    "GUATEMALA": {"country_code": "GT"},
+    "GUERNSEY": {"country_code": "GG"},
+    "GUINEA": {"country_code": "GN"},
+    "GUINEA-BISSAU": {"country_code": "GW"},
+    "GUYANA": {"country_code": "GY"},
+    "HAITI": {"country_code": "HT"},
+    "HEARD_ISLAND_AND_MCDONALD_ISLANDS": {"country_code": "HM"},
+    "VATICAN_CITY_STATE": {"country_code": "VA"},
+    "HONDURAS": {"country_code": "HN"},
+    "HONG_KONG": {"country_code": "HK"},
+    "HUNGARY": {"country_code": "HU"},
+    "ICELAND": {"country_code": "IS"},
+    "INDIA": {"country_code": "IN"},
+    "INDONESIA": {"country_code": "ID"},
+    "IRAN": {"country_code": "IR"},
+    "IRAQ": {"country_code": "IQ"},
+    "IRELAND": {"country_code": "IE"},
+    "ISLE_OF_MAN": {"country_code": "IM"},
+    "ISRAEL": {"country_code": "IL"},
+    "ITALY": {"country_code": "IT"},
+    "JAMAICA": {"country_code": "JM"},
+    "JAPAN": {"country_code": "JP"},
+    "JERSEY": {"country_code": "JE"},
+    "JORDAN": {"country_code": "JO"},
+    "KAZAKHSTAN": {"country_code": "KZ"},
+    "KENYA": {"country_code": "KE"},
+    "KIRIBATI": {"country_code": "KI"},
+    "DEMOCRATIC_PEOPLE_S_REPUBLIC_OF_KOREA": {"country_code": "KP"},
+    "REPUBLIC_OF_KOREA": {"country_code": "KR"},
+    "KUWAIT": {"country_code": "KW"},
+    "KYRGYZSTAN": {"country_code": "KG"},
+    "LAO": {"country_code": "LA"},
+    "LATVIA": {"country_code": "LV"},
+    "LEBANON": {"country_code": "LB"},
+    "LESOTHO": {"country_code": "LS"},
+    "LIBERIA": {"country_code": "LR"},
+    "LIBYA": {"country_code": "LY"},
+    "LIECHTENSTEIN": {"country_code": "LI"},
+    "LITHUANIA": {"country_code": "LT"},
+    "LUXEMBOURG": {"country_code": "LU"},
+    "MACAO": {"country_code": "MO"},
+    "MACEDONIA": {"country_code": "MK"},
+    "MADAGASCAR": {"country_code": "MG"},
+    "MALAWI": {"country_code": "MW"},
+    "MALAYSIA": {"country_code": "MY"},
+    "MALDIVES": {"country_code": "MV"},
+    "MALI": {"country_code": "ML"},
+    "MALTA": {"country_code": "MT"},
+    "MARSHALL_ISLANDS": {"country_code": "MH"},
+    "MARTINIQUE": {"country_code": "MQ"},
+    "MAURITANIA": {"country_code": "MR"},
+    "MAURITIUS": {"country_code": "MU"},
+    "MAYOTTE": {"country_code": "YT"},
+    "MEXICO": {"country_code": "MX"},
+    "MICRONESIA": {"country_code": "FM"},
+    "MOLDOVA": {"country_code": "MD"},
+    "MONACO": {"country_code": "MC"},
+    "MONGOLIA": {"country_code": "MN"},
+    "MONTENEGRO": {"country_code": "ME"},
+    "MONTSERRAT": {"country_code": "MS"},
+    "MOROCCO": {"country_code": "MA"},
+    "MOZAMBIQUE": {"country_code": "MZ"},
+    "MYANMAR": {"country_code": "MM"},
+    "NAMIBIA": {"country_code": "NA"},
+    "NAURU": {"country_code": "NR"},
+    "NEPAL": {"country_code": "NP"},
+    "NETHERLANDS": {"country_code": "NL"},
+    "NEW_CALEDONIA": {"country_code": "NC"},
+    "NEW_ZEALAND": {"country_code": "NZ"},
+    "NICARAGUA": {"country_code": "NI"},
+    "NIGER": {"country_code": "NE"},
+    "NIGERIA": {"country_code": "NG"},
+    "NIUE": {"country_code": "NU"},
+    "NORFOLK_ISLAND": {"country_code": "NF"},
+    "NORTHERN_MARIANA_ISLANDS": {"country_code": "MP"},
+    "NORWAY": {"country_code": "NO"},
+    "OMAN": {"country_code": "OM"},
+    "PAKISTAN": {"country_code": "PK"},
+    "PALAU": {"country_code": "PW"},
+    "PALESTINE": {"country_code": "PS"},
+    "PANAMA": {"country_code": "PA"},
+    "PAPUA_NEW_GUINEA": {"country_code": "PG"},
+    "PARAGUAY": {"country_code": "PY"},
+    "PERU": {"country_code": "PE"},
+    "PHILIPPINES": {"country_code": "PH"},
+    "PITCAIRN": {"country_code": "PN"},
+    "POLAND": {"country_code": "PL"},
+    "PORTUGAL": {"country_code": "PT"},
+    "PUERTO_RICO": {"country_code": "PR"},
+    "QATAR": {"country_code": "QA"},
+    "RÉUNION": {"country_code": "RE"},
+    "ROMANIA": {"country_code": "RO"},
+    "RUSSIAN_FEDERATION": {"country_code": "RU"},
+    "RWANDA": {"country_code": "RW"},
+    "SAINT_BARTHELEMY": {"country_code": "BL"},
+    "SAINT_KITTS_AND_NEVIS": {"country_code": "KN"},
+    "SAINT_LUCIA": {"country_code": "LC"},
+    "SAINT_MARTIN": {"country_code": "MF"},
+    "SAINT_PIERRE_AND_MIQUELON": {"country_code": "PM"},
+    "SAINT_VINCENT_AND_THE_GRENADINES": {"country_code": "VC"},
+    "SAMOA": {"country_code": "WS"},
+    "SAN_MARINO": {"country_code": "SM"},
+    "SAO_TOME_AND_PRINCIPE": {"country_code": "ST"},
+    "SAUDI_ARABIA": {"country_code": "SA"},
+    "SENEGAL": {"country_code": "SN"},
+    "SERBIA": {"country_code": "RS"},
+    "SEYCHELLES": {"country_code": "SC"},
+    "SIERRA_LEONE": {"country_code": "SL"},
+    "SINGAPORE": {"country_code": "SG"},
+    "SINT_MAARTEN": {"country_code": "SX"},
+    "SLOVAKIA": {"country_code": "SK"},
+    "SLOVENIA": {"country_code": "SI"},
+    "SOLOMON_ISLANDS": {"country_code": "SB"},
+    "SOMALIA": {"country_code": "SO"},
+    "SOUTH_AFRICA": {"country_code": "ZA"},
+    "SOUTH_GEORGIA": {"country_code": "GS"},
+    "SOUTH_SUDAN": {"country_code": "SS"},
+    "SPAIN": {"country_code": "ES"},
+    "SRI_LANKA": {"country_code": "LK"},
+    "SUDAN": {"country_code": "SD"},
+    "SURINAME": {"country_code": "SR"},
+    "SVALBARD_AND_JAN_MAYEN": {"country_code": "SJ"},
+    "SWAZILAND": {"country_code": "SZ"},
+    "SWEDEN": {"country_code": "SE"},
+    "SWITZERLAND": {"country_code": "CH"},
+    "SYRIAN_ARAB_REPUBLIC": {"country_code": "SY"},
+    "TAIWAN": {"country_code": "TW"},
+    "TAJIKISTAN": {"country_code": "TJ"},
+    "TANZANIA": {"country_code": "TZ"},
+    "THAILAND": {"country_code": "TH"},
+    "TIMOR-LESTE": {"country_code": "TL"},
+    "TOGO": {"country_code": "TG"},
+    "TOKELAU": {"country_code": "TK"},
+    "TONGA": {"country_code": "TO"},
+    "TRINIDAD_AND_TOBAGO": {"country_code": "TT"},
+    "TUNISIA": {"country_code": "TN"},
+    "TURKEY": {"country_code": "TR"},
+    "TURKMENISTAN": {"country_code": "TM"},
+    "TURKS_AND_CAICOS_ISLANDS": {"country_code": "TC"},
+    "TUVALU": {"country_code": "TV"},
+    "UGANDA": {"country_code": "UG"},
+    "UKRAINE": {"country_code": "UA"},
+    "UNITED_ARAB_EMIRATES": {"country_code": "AE"},
+    "UNITED_KINGDOM": {"country_code": "GB"},
+    "UNITED_STATES": {"country_code": "US"},
+    "UNITED_STATES_MINOR_OUTLYING_ISLANDS": {"country_code": "UM"},
+    "URUGUAY": {"country_code": "UY"},
+    "UZBEKISTAN": {"country_code": "UZ"},
+    "VANUATU": {"country_code": "VU"},
+    "VENEZUELA": {"country_code": "VE"},
+    "VIETNAM": {"country_code": "VN"},
+    "VIRGIN_ISLANDS_BRITISH": {"country_code": "VG"},
+    "VIRGIN_ISLANDS_US": {"country_code": "VI"},
+    "WALLIS_AND_FUTUNA": {"country_code": "WF"},
+    "WESTERN_SAHARA": {"country_code": "EH"},
+    "YEMEN": {"country_code": "YE"},
+    "ZAMBIA": {"country_code": "ZM"},
+    "ZIMBABWE": {"country_code": "ZW"},
+    "NON_COUNTRY": {"country_code": "XX"},
+}
+
+ALL_CHANNELS_2G = {
+    1: {20, 40},
+    2: {20, 40},
+    3: {20, 40},
+    4: {20, 40},
+    5: {20, 40},
+    6: {20, 40},
+    7: {20, 40},
+    8: {20, 40},
+    9: {20, 40},
+    10: {20, 40},
+    11: {20, 40},
+    12: {20, 40},
+    13: {20, 40},
+    14: {20},
+}
+
+ALL_CHANNELS_5G = {
+    36: {20, 40, 80},
+    40: {20, 40, 80},
+    44: {20, 40, 80},
+    48: {20, 40, 80},
+    52: {20, 40, 80},
+    56: {20, 40, 80},
+    60: {20, 40, 80},
+    64: {20, 40, 80},
+    100: {20, 40, 80},
+    104: {20, 40, 80},
+    108: {20, 40, 80},
+    112: {20, 40, 80},
+    116: {20, 40, 80},
+    120: {20, 40, 80},
+    124: {20, 40, 80},
+    128: {20, 40, 80},
+    132: {20, 40, 80},
+    136: {20, 40, 80},
+    140: {20, 40, 80},
+    144: {20, 40, 80},
+    149: {20, 40, 80},
+    153: {20, 40, 80},
+    157: {20, 40, 80},
+    161: {20, 40, 80},
+    165: {20},
+}
+
+ALL_CHANNELS = ALL_CHANNELS_2G | ALL_CHANNELS_5G
+
+
+@unique
+class WnmFeature(Enum):
+    """Wireless Network Management (AKA 802.11v) features hostapd supports."""
+
+    TIME_ADVERTISEMENT = auto()
+    WNM_SLEEP_MODE = auto()
+    BSS_TRANSITION_MANAGEMENT = auto()
+    PROXY_ARP = auto()
+    IPV6_NEIGHBOR_ADVERTISEMENT_MULTICAST_TO_UNICAST = auto()
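+
+# Illustrative note (assumption, not from the original source): each WnmFeature
+# value corresponds to one of the ENABLE_WNM_* dicts above, e.g.
+# WnmFeature.BSS_TRANSITION_MANAGEMENT -> ENABLE_WNM_BSS_TRANSITION_MANAGEMENT.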
diff --git a/packages/antlion/controllers/ap_lib/hostapd_security.py b/packages/antlion/controllers/ap_lib/hostapd_security.py
new file mode 100644
index 0000000..918ba26
--- /dev/null
+++ b/packages/antlion/controllers/ap_lib/hostapd_security.py
@@ -0,0 +1,408 @@
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import collections
+import string
+from enum import Enum, StrEnum, auto, unique
+
+from antlion.controllers.ap_lib import hostapd_constants
+
+
+class SecurityModeInt(int, Enum):
+    """Possible values for hostapd's "wpa" config option.
+
+    The int value is a bit field that can enable WPA and/or WPA2.
+
+    bit0 = enable WPA defined by IEEE 802.11i/D3.0
+    bit1 = enable RSN (WPA2) defined by IEEE 802.11i/RSN
+    bit2 = enable WAPI (rejected/withdrawn)
+    bit3 = enable OSEN (ENT)
+    """
+
+    WEP = 0
+    WPA1 = 1
+    WPA2 = 2
+    WPA3 = 2  # same as wpa2 and wpa2/wpa3; distinguished by wpa_key_mgmt
+    MIXED = 3  # applies to wpa/wpa2 and wpa/wpa2/wpa3; distinguished by wpa_key_mgmt
+    ENT = 8
+
+    def __str__(self):
+        return str(self.value)
+
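+# Example (illustrative, not part of the original source): SecurityModeInt.MIXED
+# has value 3, i.e. bit0 (WPA) and bit1 (RSN/WPA2) are both set, so hostapd
+# accepts both WPA and WPA2/WPA3 clients; wpa_key_mgmt then selects the key
+# management suites that are actually offered.
+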
+
+@unique
+class KeyManagement(StrEnum):
+    SAE = "SAE"
+    WPA_PSK = "WPA-PSK"
+    WPA_PSK_SAE = "WPA-PSK SAE"
+    ENT = "WPA-EAP"
+
+
+# TODO(http://b/286584981): This is currently only being used for OpenWRT.
+# Investigate whether we can replace KeyManagement with OpenWRTEncryptionMode.
+@unique
+class OpenWRTEncryptionMode(StrEnum):
+    """Combination of Wi-Fi encryption mode and ciphers.
+
+    Only used by OpenWRT.
+
+    Besides the encryption mode, the encryption option also specifies the group and peer
+    ciphers to use. To override the cipher, the value of encryption must be given in the
+    form "mode+cipher". This enum contains all possible combinations.
+
+    See https://openwrt.org/docs/guide-user/network/wifi/basic#encryption_modes.
+    """
+
+    NONE = "none"
+    """No authentication, no ciphers"""
+    SAE = "sae"
+    """WPA3 Personal (SAE) using CCMP cipher"""
+    SAE_MIXED = "sae-mixed"
+    """WPA2/WPA3 Personal (PSK/SAE) mixed mode using CCMP cipher"""
+    PSK2_TKIP_CCMP = "psk2+tkip+ccmp"
+    """WPA2 Personal (PSK) using TKIP and CCMP ciphers"""
+    PSK2_TKIP_AES = "psk2+tkip+aes"
+    """WPA2 Personal (PSK) using TKIP and AES ciphers"""
+    PSK2_TKIP = "psk2+tkip"
+    """WPA2 Personal (PSK) using TKIP cipher"""
+    PSK2_CCMP = "psk2+ccmp"
+    """WPA2 Personal (PSK) using CCMP cipher"""
+    PSK2_AES = "psk2+aes"
+    """WPA2 Personal (PSK) using AES cipher"""
+    PSK2 = "psk2"
+    """WPA2 Personal (PSK) using CCMP cipher"""
+    PSK_TKIP_CCMP = "psk+tkip+ccmp"
+    """WPA Personal (PSK) using TKIP and CCMP ciphers"""
+    PSK_TKIP_AES = "psk+tkip+aes"
+    """WPA Personal (PSK) using TKIP and AES ciphers"""
+    PSK_TKIP = "psk+tkip"
+    """WPA Personal (PSK) using TKIP cipher"""
+    PSK_CCMP = "psk+ccmp"
+    """WPA Personal (PSK) using CCMP cipher"""
+    PSK_AES = "psk+aes"
+    """WPA Personal (PSK) using AES cipher"""
+    PSK = "psk"
+    """WPA Personal (PSK) using CCMP cipher"""
+    PSK_MIXED_TKIP_CCMP = "psk-mixed+tkip+ccmp"
+    """WPA/WPA2 Personal (PSK) mixed mode using TKIP and CCMP ciphers"""
+    PSK_MIXED_TKIP_AES = "psk-mixed+tkip+aes"
+    """WPA/WPA2 Personal (PSK) mixed mode using TKIP and AES ciphers"""
+    PSK_MIXED_TKIP = "psk-mixed+tkip"
+    """WPA/WPA2 Personal (PSK) mixed mode using TKIP cipher"""
+    PSK_MIXED_CCMP = "psk-mixed+ccmp"
+    """WPA/WPA2 Personal (PSK) mixed mode using CCMP cipher"""
+    PSK_MIXED_AES = "psk-mixed+aes"
+    """WPA/WPA2 Personal (PSK) mixed mode using AES cipher"""
+    PSK_MIXED = "psk-mixed"
+    """WPA/WPA2 Personal (PSK) mixed mode using CCMP cipher"""
+    WEP = "wep"
+    """defaults to “open system” authentication aka wep+open using RC4 cipher"""
+    WEP_OPEN = "wep+open"
+    """“open system” authentication using RC4 cipher"""
+    WEP_SHARED = "wep+shared"
+    """“shared key” authentication using RC4 cipher"""
+    WPA3 = "wpa3"
+    """WPA3 Enterprise using CCMP cipher"""
+    WPA3_MIXED = "wpa3-mixed"
+    """WPA3/WPA2 Enterprise using CCMP cipher"""
+    WPA2_TKIP_CCMP = "wpa2+tkip+ccmp"
+    """WPA2 Enterprise using TKIP and CCMP ciphers"""
+    WPA2_TKIP_AES = "wpa2+tkip+aes"
+    """WPA2 Enterprise using TKIP and AES ciphers"""
+    WPA2_CCMP = "wpa2+ccmp"
+    """WPA2 Enterprise using CCMP cipher"""
+    WPA2_AES = "wpa2+aes'"
+    """WPA2 Enterprise using AES cipher"""
+    WPA2 = "wpa2"
+    """WPA2 Enterprise using CCMP cipher"""
+    WPA2_TKIP = "wpa2+tkip"
+    """WPA2 Enterprise using TKIP cipher"""
+    WPA_TKIP_CCMP = "wpa+tkip+ccmp"
+    """WPA Enterprise using TKIP and CCMP ciphers"""
+    WPA_TKIP_AES = "wpa+tkip+aes"
+    """WPA Enterprise using TKIP and AES ciphers"""
+    WPA_CCMP = "wpa+ccmp"
+    """WPA Enterprise using CCMP cipher"""
+    WPA_AES = "wpa+aes"
+    """WPA Enterprise using AES cipher"""
+    WPA_TKIP = "wpa+tkip"
+    """WPA Enterprise using TKIP cipher"""
+    WPA = "wpa"
+    """WPA Enterprise using CCMP cipher"""
+    WPA_MIXED_TKIP_CCMP = "wpa-mixed+tkip+ccmp"
+    """WPA/WPA2 Enterprise mixed mode using TKIP and CCMP ciphers"""
+    WPA_MIXED_TKIP_AES = "wpa-mixed+tkip+aes"
+    """WPA/WPA2 Enterprise mixed mode using TKIP and AES ciphers"""
+    WPA_MIXED_TKIP = "wpa-mixed+tkip"
+    """WPA/WPA2 Enterprise mixed mode using TKIP cipher"""
+    WPA_MIXED_CCMP = "wpa-mixed+ccmp"
+    """WPA/WPA2 Enterprise mixed mode using CCMP cipher"""
+    WPA_MIXED_AES = "wpa-mixed+aes"
+    """WPA/WPA2 Enterprise mixed mode using AES cipher"""
+    WPA_MIXED = "wpa-mixed"
+    """WPA/WPA2 Enterprise mixed mode using CCMP cipher"""
+    OWE = "owe"
+    """Opportunistic Wireless Encryption (OWE) using CCMP cipher"""
+
+
+@unique
+class FuchsiaSecurityType(StrEnum):
+    """Fuchsia supported security types.
+
+    Defined by the fuchsia.wlan.policy.SecurityType FIDL.
+
+    https://cs.opensource.google/fuchsia/fuchsia/+/main:sdk/fidl/fuchsia.wlan.policy/types.fidl
+    """
+
+    NONE = "none"
+    WEP = "wep"
+    WPA = "wpa"
+    WPA2 = "wpa2"
+    WPA3 = "wpa3"
+
+
+@unique
+class SecurityMode(StrEnum):
+    OPEN = auto()
+    WEP = auto()
+    WPA = auto()
+    WPA2 = auto()
+    WPA_WPA2 = auto()
+    WPA3 = auto()
+    WPA2_WPA3 = auto()
+    WPA_WPA2_WPA3 = auto()
+    ENT = auto()
+
+    def security_mode_int(self) -> SecurityModeInt:
+        match self:
+            case SecurityMode.OPEN:
+                raise TypeError("Open security doesn't have a SecurityModeInt")
+            case SecurityMode.WEP:
+                return SecurityModeInt.WEP
+            case SecurityMode.WPA:
+                return SecurityModeInt.WPA1
+            case SecurityMode.WPA2:
+                return SecurityModeInt.WPA2
+            case SecurityMode.WPA_WPA2:
+                return SecurityModeInt.MIXED
+            case SecurityMode.WPA3:
+                return SecurityModeInt.WPA3
+            case SecurityMode.WPA2_WPA3:
+                return SecurityModeInt.WPA3
+            case SecurityMode.WPA_WPA2_WPA3:
+                return SecurityModeInt.MIXED
+            case SecurityMode.ENT:
+                return SecurityModeInt.ENT
+
+    def key_management(self) -> KeyManagement | None:
+        match self:
+            case SecurityMode.OPEN:
+                return None
+            case SecurityMode.WEP:
+                return None
+            case SecurityMode.WPA:
+                return KeyManagement.WPA_PSK
+            case SecurityMode.WPA2:
+                return KeyManagement.WPA_PSK
+            case SecurityMode.WPA_WPA2:
+                return KeyManagement.WPA_PSK
+            case SecurityMode.WPA3:
+                return KeyManagement.SAE
+            case SecurityMode.WPA2_WPA3:
+                return KeyManagement.WPA_PSK_SAE
+            case SecurityMode.WPA_WPA2_WPA3:
+                return KeyManagement.WPA_PSK_SAE
+            case SecurityMode.ENT:
+                return KeyManagement.ENT
+
+    def fuchsia_security_type(self) -> FuchsiaSecurityType:
+        match self:
+            case SecurityMode.OPEN:
+                return FuchsiaSecurityType.NONE
+            case SecurityMode.WEP:
+                return FuchsiaSecurityType.WEP
+            case SecurityMode.WPA:
+                return FuchsiaSecurityType.WPA
+            case SecurityMode.WPA2:
+                return FuchsiaSecurityType.WPA2
+            case SecurityMode.WPA_WPA2:
+                return FuchsiaSecurityType.WPA2
+            case SecurityMode.WPA3:
+                return FuchsiaSecurityType.WPA3
+            case SecurityMode.WPA2_WPA3:
+                return FuchsiaSecurityType.WPA3
+            case SecurityMode.WPA_WPA2_WPA3:
+                return FuchsiaSecurityType.WPA3
+            case SecurityMode.ENT:
+                raise NotImplementedError(
+                    f'Fuchsia has not implemented support for security mode "{self}"'
+                )
+
+    def is_wpa3(self) -> bool:
+        match self:
+            case SecurityMode.OPEN:
+                return False
+            case SecurityMode.WEP:
+                return False
+            case SecurityMode.WPA:
+                return False
+            case SecurityMode.WPA2:
+                return False
+            case SecurityMode.WPA_WPA2:
+                return False
+            case SecurityMode.WPA3:
+                return True
+            case SecurityMode.WPA2_WPA3:
+                return True
+            case SecurityMode.WPA_WPA2_WPA3:
+                return True
+            case SecurityMode.ENT:
+                return False
+        raise TypeError("Unknown security mode")
+
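+# Example (illustrative, not part of the original source): SecurityMode.WPA2_WPA3
+# maps to wpa=2 (SecurityModeInt.WPA3), wpa_key_mgmt "WPA-PSK SAE"
+# (KeyManagement.WPA_PSK_SAE), and FuchsiaSecurityType.WPA3.
+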
+
+class Security(object):
+    """The Security class for hostapd representing some of the security
+    settings that are allowed in hostapd.  If needed more can be added.
+    """
+
+    def __init__(
+        self,
+        security_mode: SecurityMode = SecurityMode.OPEN,
+        password: str | None = None,
+        wpa_cipher: str | None = hostapd_constants.WPA_DEFAULT_CIPHER,
+        wpa2_cipher: str | None = hostapd_constants.WPA2_DEFAULT_CIPER,
+        wpa_group_rekey: int = hostapd_constants.WPA_GROUP_KEY_ROTATION_TIME,
+        wpa_strict_rekey: bool = hostapd_constants.WPA_STRICT_REKEY_DEFAULT,
+        wep_default_key: int = hostapd_constants.WEP_DEFAULT_KEY,
+        radius_server_ip: str | None = None,
+        radius_server_port: int | None = None,
+        radius_server_secret: str | None = None,
+    ) -> None:
+        """Gather all of the security settings for WPA-PSK.  This could be
+           expanded later.
+
+        Args:
+            security_mode: Type of security mode.
+            password: The PSK or passphrase for the security mode.
+            wpa_cipher: The cipher to be used for wpa.
+                        Options: TKIP, CCMP, TKIP CCMP
+                        Default: TKIP
+            wpa2_cipher: The cipher to be used for wpa2.
+                         Options: TKIP, CCMP, TKIP CCMP
+                         Default: CCMP
+            wpa_group_rekey: How often to refresh the GTK regardless of network
+                             changes.
+                             Options: An integer in seconds, None
+                             Default: 600 seconds
+            wpa_strict_rekey: Whether to do a group key update when client
+                              leaves the network or not.
+                              Options: True, False
+                              Default: True
+            wep_default_key: The wep key number to use when transmitting.
+            radius_server_ip: Radius server IP for Enterprise auth.
+            radius_server_port: Radius server port for Enterprise auth.
+            radius_server_secret: Radius server secret for Enterprise auth.
+        """
+        self.security_mode = security_mode
+        self.wpa_cipher = wpa_cipher
+        self.wpa2_cipher = wpa2_cipher
+        self.wpa_group_rekey = wpa_group_rekey
+        self.wpa_strict_rekey = wpa_strict_rekey
+        self.wep_default_key = wep_default_key
+        self.radius_server_ip = radius_server_ip
+        self.radius_server_port = radius_server_port
+        self.radius_server_secret = radius_server_secret
+        if password:
+            if self.security_mode is SecurityMode.WEP:
+                if len(password) in hostapd_constants.WEP_STR_LENGTH:
+                    self.password = f'"{password}"'
+                elif len(password) in hostapd_constants.WEP_HEX_LENGTH and all(
+                    c in string.hexdigits for c in password
+                ):
+                    self.password = password
+                else:
+                    raise ValueError(
+                        "WEP key must be a hex string of %s characters"
+                        % hostapd_constants.WEP_HEX_LENGTH
+                    )
+            else:
+                if (
+                    len(password) < hostapd_constants.MIN_WPA_PSK_LENGTH
+                    or len(password) > hostapd_constants.MAX_WPA_PSK_LENGTH
+                ):
+                    raise ValueError(
+                        "Password must be a minumum of %s characters and a maximum of %s"
+                        % (
+                            hostapd_constants.MIN_WPA_PSK_LENGTH,
+                            hostapd_constants.MAX_WPA_PSK_LENGTH,
+                        )
+                    )
+                else:
+                    self.password = password
+
+    def __str__(self) -> str:
+        return self.security_mode
+
+    def generate_dict(self) -> dict[str, str | int]:
+        """Returns: an ordered dictionary of settings"""
+        if self.security_mode is SecurityMode.OPEN:
+            return {}
+
+        settings: dict[str, str | int] = collections.OrderedDict()
+
+        if self.security_mode is SecurityMode.WEP:
+            settings["wep_default_key"] = self.wep_default_key
+            settings[f"wep_key{self.wep_default_key}"] = self.password
+        elif self.security_mode == SecurityMode.ENT:
+            if self.radius_server_ip is not None:
+                settings["auth_server_addr"] = self.radius_server_ip
+            if self.radius_server_port is not None:
+                settings["auth_server_port"] = self.radius_server_port
+            if self.radius_server_secret is not None:
+                settings["auth_server_shared_secret"] = self.radius_server_secret
+            settings["wpa_key_mgmt"] = hostapd_constants.ENT_KEY_MGMT
+            settings["ieee8021x"] = hostapd_constants.IEEE8021X
+            settings["wpa"] = hostapd_constants.WPA2
+        else:
+            settings["wpa"] = self.security_mode.security_mode_int().value
+            if len(self.password) == hostapd_constants.MAX_WPA_PSK_LENGTH:
+                settings["wpa_psk"] = self.password
+            else:
+                settings["wpa_passphrase"] = self.password
+            # For wpa, wpa/wpa2, and wpa/wpa2/wpa3, add wpa_pairwise
+            if self.wpa_cipher and (
+                self.security_mode is SecurityMode.WPA
+                or self.security_mode is SecurityMode.WPA_WPA2
+                or self.security_mode is SecurityMode.WPA_WPA2_WPA3
+            ):
+                settings["wpa_pairwise"] = self.wpa_cipher
+            # For wpa/wpa2, wpa2, wpa2/wpa3, and wpa3, add rsn_pairwise
+            if self.wpa2_cipher and (
+                self.security_mode is SecurityMode.WPA_WPA2
+                or self.security_mode is SecurityMode.WPA2
+                or self.security_mode is SecurityMode.WPA2_WPA3
+                or self.security_mode is SecurityMode.WPA3
+            ):
+                settings["rsn_pairwise"] = self.wpa2_cipher
+            # Add wpa_key_mgmt based on security mode string
+            wpa_key_mgmt = self.security_mode.key_management()
+            if wpa_key_mgmt is not None:
+                settings["wpa_key_mgmt"] = str(wpa_key_mgmt)
+            if self.wpa_group_rekey:
+                settings["wpa_group_rekey"] = self.wpa_group_rekey
+            if self.wpa_strict_rekey:
+                settings["wpa_strict_rekey"] = hostapd_constants.WPA_STRICT_REKEY
+
+        return settings
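+
+
+# Example usage (illustrative, not part of the original module):
+#
+#     security = Security(security_mode=SecurityMode.WPA2, password="hunter2hunter2")
+#     security.generate_dict()
+#     # -> {"wpa": 2, "wpa_passphrase": "hunter2hunter2", "rsn_pairwise": "CCMP",
+#     #     "wpa_key_mgmt": "WPA-PSK", "wpa_group_rekey": 600, "wpa_strict_rekey": 1}
+#
+# The cipher, group-rekey, and strict-rekey values come from the defaults in
+# hostapd_constants and may differ from the literals shown here.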
diff --git a/packages/antlion/controllers/ap_lib/hostapd_utils.py b/packages/antlion/controllers/ap_lib/hostapd_utils.py
new file mode 100644
index 0000000..060777e
--- /dev/null
+++ b/packages/antlion/controllers/ap_lib/hostapd_utils.py
@@ -0,0 +1,97 @@
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+from antlion import utils
+from antlion.controllers.ap_lib import hostapd_constants
+from antlion.controllers.ap_lib.hostapd_security import Security, SecurityMode
+
+
+def generate_random_password(
+    security_mode: SecurityMode = SecurityMode.OPEN,
+    length: int | None = None,
+    hex: int | None = None,
+) -> str:
+    """Generates a random password. Defaults to an 8 character ASCII password.
+
+    Args:
+        security_mode: Used to determine if length should be WEP compatible
+            (useful for generated tests to simply pass in security mode)
+        length: Length of password to generate. Defaults to 8, unless
+            security_mode is WEP, then 13
+        hex: If set, generates a hex string; otherwise ASCII
+    """
+    if hex:
+        generator_func = utils.rand_hex_str
+    else:
+        generator_func = utils.rand_ascii_str
+
+    if length:
+        return generator_func(length)
+    if security_mode is SecurityMode.WEP:
+        return generator_func(hostapd_constants.WEP_DEFAULT_STR_LENGTH)
+    else:
+        return generator_func(hostapd_constants.MIN_WPA_PSK_LENGTH)
+
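+# Illustrative examples (not part of the original module):
+#
+#     generate_random_password()                     # 8-character ASCII passphrase
+#     generate_random_password(SecurityMode.WEP)     # 13-character WEP-compatible key
+#     generate_random_password(length=64, hex=True)  # 64-character hex PSK
+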
+
+def verify_interface(interface: str, valid_interfaces: list[str]) -> None:
+    """Raises error if interface is missing or invalid
+
+    Args:
+        interface: interface name
+        valid_interfaces: valid interface names
+    """
+    if interface not in valid_interfaces:
+        raise ValueError(f"Invalid interface name was passed: {interface}")
+
+
+def verify_security_mode(
+    security_profile: Security, valid_security_modes: list[SecurityMode]
+) -> None:
+    """Raises error if security mode is not in list of valid security modes.
+
+    Args:
+        security_profile: Security to verify
+        valid_security_modes: Valid security modes for a profile.
+    """
+    if security_profile.security_mode not in valid_security_modes:
+        raise ValueError(
+            f"Invalid Security Mode: {security_profile.security_mode}; "
+            f"Valid Security Modes for this profile: {valid_security_modes}"
+        )
+
+
+def verify_cipher(security_profile: Security, valid_ciphers: list[str]) -> None:
+    """Raise error if cipher is not in list of valid ciphers.
+
+    Args:
+        security_profile: Security profile to verify
+        valid_ciphers: A list of valid ciphers for security_profile.
+    """
+    if security_profile.security_mode is SecurityMode.OPEN:
+        raise ValueError("Security mode is open.")
+    elif security_profile.security_mode is SecurityMode.WPA:
+        if security_profile.wpa_cipher not in valid_ciphers:
+            raise ValueError(
+                f"Invalid WPA Cipher: {security_profile.wpa_cipher}. "
+                f"Valid WPA Ciphers for this profile: {valid_ciphers}"
+            )
+    elif security_profile.security_mode is SecurityMode.WPA2:
+        if security_profile.wpa2_cipher not in valid_ciphers:
+            raise ValueError(
+                f"Invalid WPA2 Cipher: {security_profile.wpa2_cipher}. "
+                f"Valid WPA2 Ciphers for this profile: {valid_ciphers}"
+            )
+    else:
+        raise ValueError(f"Invalid Security Mode: {security_profile.security_mode}")
diff --git a/packages/antlion/controllers/ap_lib/radio_measurement.py b/packages/antlion/controllers/ap_lib/radio_measurement.py
new file mode 100644
index 0000000..5c7f2e0
--- /dev/null
+++ b/packages/antlion/controllers/ap_lib/radio_measurement.py
@@ -0,0 +1,246 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from enum import IntEnum, unique
+
+
+@unique
+class ApReachability(IntEnum):
+    """Neighbor Report AP Reachability values.
+
+    See IEEE 802.11-2020 Figure 9-172.
+    """
+
+    NOT_REACHABLE = 1
+    UNKNOWN = 2
+    REACHABLE = 3
+
+
+class BssidInformationCapabilities:
+    """Representation of Neighbor Report BSSID Information Capabilities.
+
+    See IEEE 802.11-2020 Figure 9-338 and 9.4.1.4.
+    """
+
+    def __init__(
+        self,
+        spectrum_management: bool = False,
+        qos: bool = False,
+        apsd: bool = False,
+        radio_measurement: bool = False,
+    ):
+        """Create a capabilities object.
+
+        Args:
+            spectrum_management: whether spectrum management is required.
+            qos: whether QoS is implemented.
+            apsd: whether APSD is implemented.
+            radio_measurement: whether radio measurement is activated.
+        """
+        self._spectrum_management = spectrum_management
+        self._qos = qos
+        self._apsd = apsd
+        self._radio_measurement = radio_measurement
+
+    def __index__(self) -> int:
+        """Convert to numeric representation of the field's bits."""
+        return (
+            self.spectrum_management << 5
+            | self.qos << 4
+            | self.apsd << 3
+            | self.radio_measurement << 2
+        )
+
+    @property
+    def spectrum_management(self) -> bool:
+        return self._spectrum_management
+
+    @property
+    def qos(self) -> bool:
+        return self._qos
+
+    @property
+    def apsd(self) -> bool:
+        return self._apsd
+
+    @property
+    def radio_measurement(self) -> bool:
+        return self._radio_measurement
+
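+# Example (illustrative, not part of the original module):
+#
+#     int(BssidInformationCapabilities(qos=True, radio_measurement=True))
+#     # -> 0b010100 == 20: bit 4 (QoS) and bit 2 (radio measurement) are set
+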
+
+class BssidInformation:
+    """Representation of Neighbor Report BSSID Information field.
+
+    BssidInformation contains info about a neighboring AP, to be included in a
+    neighbor report element. See IEEE 802.11-2020 Figure 9-337.
+    """
+
+    def __init__(
+        self,
+        ap_reachability: ApReachability = ApReachability.UNKNOWN,
+        security: bool = False,
+        key_scope: bool = False,
+        capabilities: BssidInformationCapabilities = BssidInformationCapabilities(),
+        mobility_domain: bool = False,
+        high_throughput: bool = False,
+        very_high_throughput: bool = False,
+        ftm: bool = False,
+    ):
+        """Create a BSSID Information object for a neighboring AP.
+
+        Args:
+            ap_reachability: whether this AP is reachable by the STA that
+                requested the neighbor report.
+            security: whether this AP is known to support the same security
+                provisioning as used by the STA in its current association.
+            key_scope: whether this AP is known to have the same
+                authenticator as the AP sending the report.
+            capabilities: selected capabilities of this AP.
+            mobility_domain: whether the AP is including an MDE in its beacon
+                frames and the contents of that MDE are identical to the MDE
+                advertised by the AP sending the report.
+            high_throughput: whether the AP is an HT AP including the HT
+                Capabilities element in its Beacons, and that the contents of
+                that HT capabilities element are identical to the HT
+                capabilities element advertised by the AP sending the report.
+            very_high_throughput: whether the AP is a VHT AP and the VHT
+                capabilities element, if included as a subelement, is
+                identical in content to the VHT capabilities element included
+                in the AP’s beacon.
+            ftm: whether the AP is known to have the Fine Timing Measurement
+                Responder extended capability.
+        """
+        self._ap_reachability = ap_reachability
+        self._security = security
+        self._key_scope = key_scope
+        self._capabilities = capabilities
+        self._mobility_domain = mobility_domain
+        self._high_throughput = high_throughput
+        self._very_high_throughput = very_high_throughput
+        self._ftm = ftm
+
+    def __index__(self) -> int:
+        """Convert to numeric representation of the field's bits."""
+        return (
+            self._ap_reachability << 30
+            | self.security << 29
+            | self.key_scope << 28
+            | int(self.capabilities) << 22
+            | self.mobility_domain << 21
+            | self.high_throughput << 20
+            | self.very_high_throughput << 19
+            | self.ftm << 18
+        )
+
+    @property
+    def security(self) -> bool:
+        return self._security
+
+    @property
+    def key_scope(self) -> bool:
+        return self._key_scope
+
+    @property
+    def capabilities(self) -> BssidInformationCapabilities:
+        return self._capabilities
+
+    @property
+    def mobility_domain(self) -> bool:
+        return self._mobility_domain
+
+    @property
+    def high_throughput(self) -> bool:
+        return self._high_throughput
+
+    @property
+    def very_high_throughput(self) -> bool:
+        return self._very_high_throughput
+
+    @property
+    def ftm(self) -> bool:
+        return self._ftm
+
+
+@unique
+class PhyType(IntEnum):
+    """PHY type values, see dot11PhyType in 802.11-2020 Annex C."""
+
+    DSSS = 2
+    OFDM = 4
+    HRDSS = 5
+    ERP = 6
+    HT = 7
+    DMG = 8
+    VHT = 9
+    TVHT = 10
+    S1G = 11
+    CDMG = 12
+    CMMG = 13
+
+
+class NeighborReportElement:
+    """Representation of Neighbor Report element.
+
+    See IEEE 802.11-2020 9.4.2.36.
+    """
+
+    def __init__(
+        self,
+        bssid: str,
+        bssid_information: BssidInformation,
+        operating_class: int,
+        channel_number: int,
+        phy_type: PhyType,
+    ):
+        """Create a neighbor report element.
+
+        Args:
+            bssid: MAC address of the neighbor.
+            bssid_information: BSSID Information of the neighbor.
+            operating_class: operating class of the neighbor.
+            channel_number: channel number of the neighbor.
+            phy_type: dot11PhyType of the neighbor.
+        """
+        self._bssid = bssid
+        self._bssid_information = bssid_information
+
+        # Operating Class, IEEE 802.11-2020 Annex E.
+        self._operating_class = operating_class
+
+        self._channel_number = channel_number
+
+        # PHY Type, IEEE 802.11-2020 Annex C.
+        self._phy_type = phy_type
+
+    @property
+    def bssid(self) -> str:
+        return self._bssid
+
+    @property
+    def bssid_information(self) -> BssidInformation:
+        return self._bssid_information
+
+    @property
+    def operating_class(self) -> int:
+        return self._operating_class
+
+    @property
+    def channel_number(self) -> int:
+        return self._channel_number
+
+    @property
+    def phy_type(self) -> PhyType:
+        return self._phy_type
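+
+
+# Example (illustrative, not part of the original module): a neighbor report
+# entry for a reachable 5 GHz VHT AP on channel 36 (global operating class 128):
+#
+#     NeighborReportElement(
+#         bssid="01:23:45:67:89:ab",
+#         bssid_information=BssidInformation(
+#             ap_reachability=ApReachability.REACHABLE, very_high_throughput=True
+#         ),
+#         operating_class=128,
+#         channel_number=36,
+#         phy_type=PhyType.VHT,
+#     )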
diff --git a/packages/antlion/controllers/ap_lib/radvd.py b/packages/antlion/controllers/ap_lib/radvd.py
new file mode 100644
index 0000000..07af019
--- /dev/null
+++ b/packages/antlion/controllers/ap_lib/radvd.py
@@ -0,0 +1,212 @@
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+import shlex
+import tempfile
+import time
+from typing import Any
+
+from antlion.controllers.ap_lib.radvd_config import RadvdConfig
+from antlion.controllers.utils_lib.commands import shell
+from antlion.libs.proc import job
+
+
+class Error(Exception):
+    """An error caused by radvd."""
+
+
+class Radvd(object):
+    """Manages the radvd program.
+
+    https://en.wikipedia.org/wiki/Radvd
+    radvd advertises IPv6 router addresses and IPv6 routing prefixes using the
+    Neighbor Discovery Protocol; this class manages its configuration and
+    lifecycle.
+
+    Attributes:
+        config: The radvd configuration that is being used.
+    """
+
+    def __init__(
+        self,
+        runner: Any,
+        interface: str,
+        working_dir: str | None = None,
+        radvd_binary: str | None = None,
+    ) -> None:
+        """
+        Args:
+            runner: Object that has run_async and run methods for executing
+                    shell commands (e.g. connection.SshConnection)
+            interface: Name of the interface to use (e.g. wlan0).
+            working_dir: Directory to work out of.
+            radvd_binary: Location of the radvd binary
+        """
+        if not radvd_binary:
+            logging.debug("No radvd binary specified. Assuming radvd is in the path.")
+            radvd_binary = "radvd"
+        else:
+            logging.debug(f"Using radvd binary located at {radvd_binary}")
+        if working_dir is None:
+            if runner == job.run:
+                working_dir = tempfile.gettempdir()
+            else:
+                working_dir = "/tmp"
+        self._radvd_binary = radvd_binary
+        self._runner = runner
+        self._interface = interface
+        self._working_dir = working_dir
+        self.config: RadvdConfig | None = None
+        self._shell = shell.ShellCommand(runner, working_dir)
+        self._log_file = f"{working_dir}/radvd-{self._interface}.log"
+        self._config_file = f"{working_dir}/radvd-{self._interface}.conf"
+        self._pid_file = f"{working_dir}/radvd-{self._interface}.pid"
+        self._ps_identifier = f"{self._radvd_binary}.*{self._config_file}"
+
+    def start(self, config: RadvdConfig, timeout: int = 60) -> None:
+        """Starts radvd
+
+        Starts the radvd daemon and runs it in the background.
+
+        Args:
+            config: Configs to start the radvd with.
+            timeout: Time to wait for radvd to come up.
+
+        Raises:
+            Error: If radvd is not running within the timeout. Note that the
+                daemon can still start and then fail later; errors from invalid
+                configurations can take a long time to surface, and because the
+                daemon runs indefinitely it's impossible to wait on them. To
+                check whether the configs are ok, periodically call is_alive
+                and inspect the logs.
+        """
+        if self.is_alive():
+            self.stop()
+
+        self.config = config
+
+        self._shell.delete_file(self._log_file)
+        self._shell.delete_file(self._config_file)
+        self._write_configs(self.config)
+
+        command = (
+            f"{self._radvd_binary} -C {shlex.quote(self._config_file)} "
+            f"-p {shlex.quote(self._pid_file)} -m logfile -d 5 "
+            f'-l {self._log_file} > "{self._log_file}" 2>&1'
+        )
+        self._runner.run_async(command)
+
+        try:
+            self._wait_for_process(timeout=timeout)
+        except Error:
+            self.stop()
+            raise
+
+    def stop(self):
+        """Kills the daemon if it is running."""
+        self._shell.kill(self._ps_identifier)
+
+    def is_alive(self):
+        """
+        Returns:
+            True if the daemon is running.
+        """
+        return self._shell.is_alive(self._ps_identifier)
+
+    def pull_logs(self) -> str:
+        """Pulls the log files from where radvd is running.
+
+        Returns:
+            A string of the radvd logs.
+        """
+        # TODO: Auto pulling of logs when stop is called.
+        return self._shell.read_file(self._log_file)
+
+    def _wait_for_process(self, timeout: int = 60) -> None:
+        """Waits for the process to come up.
+
+        Waits until the radvd process is found running, or there is
+        a timeout. If the program never comes up then the log file
+        will be scanned for errors.
+
+        Raises: See _scan_for_errors
+        """
+        start_time = time.time()
+        while time.time() - start_time < timeout and not self.is_alive():
+            time.sleep(0.1)
+            self._scan_for_errors(False)
+        self._scan_for_errors(True)
+
+    def _scan_for_errors(self, should_be_up: bool) -> None:
+        """Scans the radvd log for any errors.
+
+        Args:
+            should_be_up: If true then radvd program is expected to be alive.
+                          If it is found not alive while this is true an error
+                          is thrown.
+
+        Raises:
+            Error: Raised when a radvd error is found.
+        """
+        # Store this so that all other errors have priority.
+        is_dead = not self.is_alive()
+
+        exited_prematurely = self._shell.search_file("Exiting", self._log_file)
+        if exited_prematurely:
+            raise Error("Radvd exited prematurely.", self)
+        if should_be_up and is_dead:
+            raise Error("Radvd failed to start", self)
+
+    def _write_configs(self, config: RadvdConfig) -> None:
+        """Writes the configs to the radvd config file.
+
+        Args:
+            config: a RadvdConfig object.
+        """
+        self._shell.delete_file(self._config_file)
+        conf = config.package_configs()
+        lines = ["interface %s {" % self._interface]
+        for interface_option_key, interface_option in conf["interface_options"].items():
+            lines.append(f"\t{str(interface_option_key)} {str(interface_option)};")
+        lines.append(f"\tprefix {conf['prefix']}")
+        lines.append("\t{")
+        for prefix_option in conf["prefix_options"].items():
+            lines.append(f"\t\t{' '.join(map(str, prefix_option))};")
+        lines.append("\t};")
+        if conf["clients"]:
+            lines.append("\tclients")
+            lines.append("\t{")
+            for client in conf["clients"]:
+                lines.append(f"\t\t{client};")
+            lines.append("\t};")
+        if conf["route"]:
+            lines.append("\troute %s {" % conf["route"])
+            for route_option in conf["route_options"].items():
+                lines.append(f"\t\t{' '.join(map(str, route_option))};")
+            lines.append("\t};")
+        if conf["rdnss"]:
+            lines.append(
+                "\tRDNSS %s {" % " ".join([str(elem) for elem in conf["rdnss"]])
+            )
+            for rdnss_option in conf["rdnss_options"].items():
+                lines.append(f"\t\t{' '.join(map(str, rdnss_option))};")
+            lines.append("\t};")
+        lines.append("};")
+        output_config = "\n".join(lines)
+        logging.info(f"Writing {self._config_file}")
+        logging.debug("******************Start*******************")
+        logging.debug(f"\n{output_config}")
+        logging.debug("*******************End********************")
+
+        self._shell.write_file(self._config_file, output_config)
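+
+
+# For reference, the generated config has roughly this shape (illustrative; the
+# actual option names and values come from RadvdConfig.package_configs()):
+#
+#     interface wlan0 {
+#         AdvSendAdvert on;
+#         prefix fd00::/64
+#         {
+#             AdvOnLink on;
+#             AdvAutonomous on;
+#         };
+#     };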
diff --git a/packages/antlion/controllers/ap_lib/radvd_config.py b/packages/antlion/controllers/ap_lib/radvd_config.py
new file mode 100644
index 0000000..d3d6d97
--- /dev/null
+++ b/packages/antlion/controllers/ap_lib/radvd_config.py
@@ -0,0 +1,313 @@
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import collections
+from typing import Any
+
+from antlion.controllers.ap_lib import radvd_constants
+
+
+class RadvdConfig(object):
+    """The root settings for the router advertisement daemon.
+
+    All the settings for a router advertisement daemon.
+    """
+
+    def __init__(
+        self,
+        prefix: str = radvd_constants.DEFAULT_PREFIX,
+        clients: list[str] = [],
+        route: Any | None = None,
+        rdnss: list[str] = [],
+        ignore_if_missing: str | None = None,
+        adv_send_advert: str = radvd_constants.ADV_SEND_ADVERT_ON,
+        unicast_only: str | None = None,
+        max_rtr_adv_interval: int | None = None,
+        min_rtr_adv_interval: int | None = None,
+        min_delay_between_ras: int | None = None,
+        adv_managed_flag: str | None = None,
+        adv_other_config_flag: str | None = None,
+        adv_link_mtu: int | None = None,
+        adv_reachable_time: int | None = None,
+        adv_retrans_timer: int | None = None,
+        adv_cur_hop_limit: int | None = None,
+        adv_default_lifetime: int | None = None,
+        adv_default_preference: str | None = None,
+        adv_source_ll_address: str | None = None,
+        adv_home_agent_flag: str | None = None,
+        adv_home_agent_info: str | None = None,
+        home_agent_lifetime: int | None = None,
+        home_agent_preference: int | None = None,
+        adv_mob_rtr_support_flag: str | None = None,
+        adv_interval_opt: str | None = None,
+        adv_on_link: str = radvd_constants.ADV_ON_LINK_ON,
+        adv_autonomous: str = radvd_constants.ADV_AUTONOMOUS_ON,
+        adv_router_addr: str | None = None,
+        adv_valid_lifetime: int | None = None,
+        adv_preferred_lifetime: int | None = None,
+        base_6to4_interface: str | None = None,
+        adv_route_lifetime: int | None = None,
+        adv_route_preference: str | None = None,
+        adv_rdnss_preference: int | None = None,
+        adv_rdnss_open: str | None = None,
+        adv_rdnss_lifetime: int | None = None,
+    ) -> None:
+        """Construct a RadvdConfig.
+
+        Args:
+            prefix: IPv6 prefix and length, e.g. fd::/64
+            clients: A list of IPv6 link local addresses that will be the only
+                clients served.  All other IPv6 addresses will be ignored if
+                this list is present.
+            route: A route for the router advertisement with prefix.
+            rdnss: A list of recursive DNS servers
+            ignore_if_missing: A flag indicating whether or not the interface
+                is ignored if it does not exist at start-up. By default,
+                radvd exits.
+            adv_send_advert: A flag indicating whether or not the router sends
+                periodic router advertisements and responds to router
+                solicitations.
+            unicast_only: Indicates that the interface link type only supports
+                unicast.
+            max_rtr_adv_interval: The maximum time allowed between sending
+                unsolicited multicast router advertisements from the interface,
+                in seconds. Must be no less than 4 seconds and no greater than
+                1800 seconds.
+            min_rtr_adv_interval: The minimum time allowed between sending
+                unsolicited multicast router advertisements from the interface,
+                in seconds. Must be no less than 3 seconds and no greater than
+                0.75 * max_rtr_adv_interval.
+            min_delay_between_ras: The minimum time allowed between sending
+                multicast router advertisements from the interface, in seconds.
+            adv_managed_flag: When set, hosts use the administered (stateful)
+                protocol for address autoconfiguration in addition to any
+                addresses autoconfigured using stateless address
+                autoconfiguration. The use of this flag is described in
+                RFC 4862.
+            adv_other_config_flag: When set, hosts use the administered
+                (stateful) protocol for autoconfiguration of other (non-address)
+                information. The use of this flag is described in RFC 4862.
+            adv_link_mtu: The MTU option is used in router advertisement
+                messages to ensure that all nodes on a link use the same MTU
+                value in those cases where the link MTU is not well known.
+            adv_reachable_time: The time, in milliseconds, that a node assumes
+                a neighbor is reachable after having received a reachability
+                confirmation. Used by the Neighbor Unreachability Detection
+                algorithm (see Section 7.3 of RFC 4861). A value of zero means
+                unspecified (by this router).
+            adv_retrans_timer: The time, in milliseconds, between retransmitted
+                Neighbor Solicitation messages. Used by address resolution and
+                the Neighbor Unreachability Detection algorithm (see Sections
+                7.2 and 7.3 of RFC 4861). A value of zero means unspecified
+                (by this router).
+            adv_cur_hop_limit: The default value that should be placed in the
+                Hop Count field of the IP header for outgoing (unicast) IP
+                packets. The value should be set to the current diameter of the
+                Internet. The value zero means unspecified (by this router).
+            adv_default_lifetime: The lifetime associated with the default
+                router in units of seconds. The maximum value corresponds to
+                18.2 hours. A lifetime of 0 indicates that the router is not a
+                default router and should not appear on the default router list.
+                The router lifetime applies only to the router's usefulness as
+                a default router; it does not apply to information contained in
+                other message fields or options. Options that need time limits
+                for their information include their own lifetime fields.
+            adv_default_preference: The preference associated with the default
+                router, as either "low", "medium", or "high".
+            adv_source_ll_address: When set, the link-layer address of the
+                outgoing interface is included in the RA.
+            adv_home_agent_flag: When set, indicates that sending router is able
+                to serve as Mobile IPv6 Home Agent. When set, minimum limits
+                specified by Mobile IPv6 are used for MinRtrAdvInterval and
+                MaxRtrAdvInterval.
+            adv_home_agent_info: When set, Home Agent Information Option
+                (specified by Mobile IPv6) is included in Router Advertisements.
+                adv_home_agent_flag must also be set when using this option.
+            home_agent_lifetime: The length of time in seconds (relative to the
+                time the packet is sent) that the router is offering Mobile IPv6
+                Home Agent services. A value of 0 must not be used. The maximum
+                lifetime is 65520 seconds (18.2 hours). This option is ignored
+                if adv_home_agent_info is not set.
+            home_agent_preference: The preference for the Home Agent sending
+                this Router Advertisement. Values greater than 0 indicate a
+                more preferable Home Agent; values less than 0 indicate a less
+                preferable Home Agent. This option is ignored if
+                adv_home_agent_info is not set.
+            adv_mob_rtr_support_flag: When set, the Home Agent signals it
+                supports Mobile Router registrations (specified by NEMO Basic).
+                adv_home_agent_info must also be set when using this option.
+            adv_interval_opt: When set, Advertisement Interval Option
+                (specified by Mobile IPv6) is included in Router Advertisements.
+                When set, minimum limits specified by Mobile IPv6 are used for
+                MinRtrAdvInterval and MaxRtrAdvInterval.
+            adv_on_link: When set, indicates that this prefix can be used for
+                on-link determination. When not set, the advertisement makes no
+                statement about on-link or off-link properties of the prefix.
+                For instance, the prefix might be used for address configuration
+                with some of the addresses belonging to the prefix being
+                on-link and others being off-link.
+            adv_autonomous: When set, indicates that this prefix can be used for
+                autonomous address configuration as specified in RFC 4862.
+            adv_router_addr: When set, indicates that the address of interface
+                is sent instead of network prefix, as is required by Mobile
+                IPv6. When set, minimum limits specified by Mobile IPv6 are used
+                for MinRtrAdvInterval and MaxRtrAdvInterval.
+            adv_valid_lifetime: The length of time in seconds (relative to the
+                time the packet is sent) that the prefix is valid for the
+                purpose of on-link determination. The symbolic value infinity
+                represents infinity (i.e. a value of all one bits (0xffffffff)).
+                The valid lifetime is also used by RFC 4862.
+            adv_preferred_lifetime: The length of time in seconds (relative to the
+                time the packet is sent) that addresses generated from the
+                prefix via stateless address autoconfiguration remain preferred.
+                The symbolic value infinity represents infinity (i.e. a value of
+                all one bits (0xffffffff)). See RFC 4862.
+            base_6to4_interface: If this option is specified, this prefix will
+                be combined with the IPv4 address of interface name to produce
+                a valid 6to4 prefix. The first 16 bits of this prefix will be
+                replaced by 2002 and the next 32 bits of this prefix will be
+                replaced by the IPv4 address assigned to interface name at
+                configuration time. The remaining 80 bits of the prefix
+                (including the SLA ID) will be advertised as specified in the
+                configuration file.
+            adv_route_lifetime: The lifetime associated with the route in units
+                of seconds. The symbolic value infinity represents infinity
+                (i.e. a value of all one bits (0xffffffff)).
+            adv_route_preference: The preference associated with the route,
+                as either "low", "medium", or "high".
+            adv_rdnss_preference: The preference of the DNS server, compared to
+                other DNS servers advertised and used. 0 to 7 means less
+                important than manually configured nameservers in resolv.conf,
+                while 12 to 15 means more important.
+            adv_rdnss_open: "Service Open" flag. When set, indicates that RDNSS
+                continues to be available to hosts even if they moved to a
+                different subnet.
+            adv_rdnss_lifetime: The maximum duration for which the RDNSS
+                entries may be used for name resolution. A value of 0 means
+                the nameserver should no longer be used. The value, if not 0,
+                must be at least max_rtr_adv_interval. To ensure stale RDNSS
+                info gets removed in a timely fashion, this should not be
+                greater than 2*max_rtr_adv_interval.
+        """
+        self._prefix = prefix
+        self._clients = clients
+        self._route = route
+        self._rdnss = rdnss
+        self._ignore_if_missing = ignore_if_missing
+        self._adv_send_advert = adv_send_advert
+        self._unicast_only = unicast_only
+        self._max_rtr_adv_interval = max_rtr_adv_interval
+        self._min_rtr_adv_interval = min_rtr_adv_interval
+        self._min_delay_between_ras = min_delay_between_ras
+        self._adv_managed_flag = adv_managed_flag
+        self._adv_other_config_flag = adv_other_config_flag
+        self._adv_link_mtu = adv_link_mtu
+        self._adv_reachable_time = adv_reachable_time
+        self._adv_retrans_timer = adv_retrans_timer
+        self._adv_cur_hop_limit = adv_cur_hop_limit
+        self._adv_default_lifetime = adv_default_lifetime
+        self._adv_default_preference = adv_default_preference
+        self._adv_source_ll_address = adv_source_ll_address
+        self._adv_home_agent_flag = adv_home_agent_flag
+        self._adv_home_agent_info = adv_home_agent_info
+        self._home_agent_lifetime = home_agent_lifetime
+        self._home_agent_preference = home_agent_preference
+        self._adv_mob_rtr_support_flag = adv_mob_rtr_support_flag
+        self._adv_interval_opt = adv_interval_opt
+        self._adv_on_link = adv_on_link
+        self._adv_autonomous = adv_autonomous
+        self._adv_router_addr = adv_router_addr
+        self._adv_valid_lifetime = adv_valid_lifetime
+        self._adv_preferred_lifetime = adv_preferred_lifetime
+        self._base_6to4_interface = base_6to4_interface
+        self._adv_route_lifetime = adv_route_lifetime
+        self._adv_route_preference = adv_route_preference
+        self._adv_rdnss_preference = adv_rdnss_preference
+        self._adv_rdnss_open = adv_rdnss_open
+        self._adv_rdnss_lifetime = adv_rdnss_lifetime
+
+    def package_configs(self):
+        conf: dict[str, Any] = dict()
+        conf["prefix"] = self._prefix
+        conf["clients"] = self._clients
+        conf["route"] = self._route
+        conf["rdnss"] = self._rdnss
+
+        conf["interface_options"] = collections.OrderedDict(
+            filter(
+                lambda pair: pair[1] is not None,
+                (
+                    ("IgnoreIfMissing", self._ignore_if_missing),
+                    ("AdvSendAdvert", self._adv_send_advert),
+                    ("UnicastOnly", self._unicast_only),
+                    ("MaxRtrAdvInterval", self._max_rtr_adv_interval),
+                    ("MinRtrAdvInterval", self._min_rtr_adv_interval),
+                    ("MinDelayBetweenRAs", self._min_delay_between_ras),
+                    ("AdvManagedFlag", self._adv_managed_flag),
+                    ("AdvOtherConfigFlag", self._adv_other_config_flag),
+                    ("AdvLinkMTU", self._adv_link_mtu),
+                    ("AdvReachableTime", self._adv_reachable_time),
+                    ("AdvRetransTimer", self._adv_retrans_timer),
+                    ("AdvCurHopLimit", self._adv_cur_hop_limit),
+                    ("AdvDefaultLifetime", self._adv_default_lifetime),
+                    ("AdvDefaultPreference", self._adv_default_preference),
+                    ("AdvSourceLLAddress", self._adv_source_ll_address),
+                    ("AdvHomeAgentFlag", self._adv_home_agent_flag),
+                    ("AdvHomeAgentInfo", self._adv_home_agent_info),
+                    ("HomeAgentLifetime", self._home_agent_lifetime),
+                    ("HomeAgentPreference", self._home_agent_preference),
+                    ("AdvMobRtrSupportFlag", self._adv_mob_rtr_support_flag),
+                    ("AdvIntervalOpt", self._adv_interval_opt),
+                ),
+            )
+        )
+
+        conf["prefix_options"] = collections.OrderedDict(
+            filter(
+                lambda pair: pair[1] is not None,
+                (
+                    ("AdvOnLink", self._adv_on_link),
+                    ("AdvAutonomous", self._adv_autonomous),
+                    ("AdvRouterAddr", self._adv_router_addr),
+                    ("AdvValidLifetime", self._adv_valid_lifetime),
+                    ("AdvPreferredLifetime", self._adv_preferred_lifetime),
+                    ("Base6to4Interface", self._base_6to4_interface),
+                ),
+            )
+        )
+
+        conf["route_options"] = collections.OrderedDict(
+            filter(
+                lambda pair: pair[1] is not None,
+                (
+                    ("AdvRouteLifetime", self._adv_route_lifetime),
+                    ("AdvRoutePreference", self._adv_route_preference),
+                ),
+            )
+        )
+
+        conf["rdnss_options"] = collections.OrderedDict(
+            filter(
+                lambda pair: pair[1] is not None,
+                (
+                    ("AdvRDNSSPreference", self._adv_rdnss_preference),
+                    ("AdvRDNSSOpen", self._adv_rdnss_open),
+                    ("AdvRDNSSLifetime", self._adv_rdnss_lifetime),
+                ),
+            )
+        )
+
+        return conf
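+
+
+# A minimal usage sketch appended for illustration only; it is not part of the
+# original change. It assumes the class defined above is named `RadvdConfig`
+# and that its constructor accepts the keyword arguments documented in
+# `__init__` (with defaults for anything not passed); adjust to the actual
+# signature if it differs.
+if __name__ == "__main__":
+    example = RadvdConfig(
+        prefix="fd00::/64",
+        adv_send_advert="on",
+        adv_on_link="on",
+        adv_autonomous="on",
+    )
+    # package_configs() groups only the options that were set into interface,
+    # prefix, route, and RDNSS sections for the radvd controller to consume.
+    print(example.package_configs())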
diff --git a/packages/antlion/controllers/ap_lib/radvd_constants.py b/packages/antlion/controllers/ap_lib/radvd_constants.py
new file mode 100644
index 0000000..b02a694
--- /dev/null
+++ b/packages/antlion/controllers/ap_lib/radvd_constants.py
@@ -0,0 +1,66 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+DEFAULT_PREFIX = "fd00::/64"
+
+IGNORE_IF_MISSING_ON = "on"
+IGNORE_IF_MISSING_OFF = "off"
+
+ADV_SEND_ADVERT_ON = "on"
+ADV_SEND_ADVERT_OFF = "off"
+
+UNICAST_ONLY_ON = "on"
+UNICAST_ONLY_OFF = "off"
+
+ADV_MANAGED_FLAG_ON = "on"
+ADV_MANAGED_FLAG_OFF = "off"
+
+ADV_OTHER_CONFIG_FLAG_ON = "on"
+ADV_OTHER_CONFIG_FLAG_OFF = "off"
+
+ADV_DEFAULT_PREFERENCE_ON = "on"
+ADV_DEFAULT_PREFERENCE_OFF = "off"
+
+ADV_SOURCE_LL_ADDRESS_ON = "on"
+ADV_SOURCE_LL_ADDRESS_OFF = "off"
+
+ADV_HOME_AGENT_FLAG_ON = "on"
+ADV_HOME_AGENT_FLAG_OFF = "off"
+
+ADV_HOME_AGENT_INFO_ON = "on"
+ADV_HOME_AGENT_INFO_OFF = "off"
+
+ADV_MOB_RTR_SUPPORT_FLAG_ON = "on"
+ADV_MOB_RTR_SUPPORT_FLAG_OFF = "off"
+
+ADV_INTERVAL_OPT_ON = "on"
+ADV_INTERVAL_OPT_OFF = "off"
+
+ADV_ON_LINK_ON = "on"
+ADV_ON_LINK_OFF = "off"
+
+ADV_AUTONOMOUS_ON = "on"
+ADV_AUTONOMOUS_OFF = "off"
+
+ADV_ROUTER_ADDR_ON = "on"
+ADV_ROUTER_ADDR_OFF = "off"
+
+ADV_ROUTE_PREFERENCE_LOW = "low"
+ADV_ROUTE_PREFERENCE_MED = "medium"
+ADV_ROUTE_PREFERENCE_HIGH = "high"
+
+ADV_RDNSS_OPEN_ON = "on"
+ADV_RDNSS_OPEN_OFF = "off"
diff --git a/packages/antlion/controllers/ap_lib/regulatory_channels.py b/packages/antlion/controllers/ap_lib/regulatory_channels.py
new file mode 100644
index 0000000..432607c
--- /dev/null
+++ b/packages/antlion/controllers/ap_lib/regulatory_channels.py
@@ -0,0 +1,710 @@
+from dataclasses import dataclass
+
+Channel = int
+Bandwidth = int
+# TODO(http://b/281728764): Add device requirements to each frequency e.g.
+# "MUST be used indoors only" or "MUST be used with DFS".
+ChannelBandwidthMap = dict[Channel, list[Bandwidth]]
+
+
+@dataclass
+class CountryChannels:
+    country_code: str
+    allowed_channels: ChannelBandwidthMap
+
+
+# All antlion-supported channels and frequencies for use in regulatory testing.
+TEST_CHANNELS: ChannelBandwidthMap = {
+    1: [20],
+    2: [20],
+    3: [20],
+    4: [20],
+    5: [20],
+    6: [20],
+    7: [20],
+    8: [20],
+    9: [20],
+    10: [20],
+    11: [20],
+    12: [20],
+    13: [20],
+    14: [20],
+    36: [20, 40, 80],
+    40: [20, 40, 80],
+    44: [20, 40, 80],
+    48: [20, 40, 80],
+    52: [20, 40, 80],
+    56: [20, 40, 80],
+    60: [20, 40, 80],
+    64: [20, 40, 80],
+    100: [20, 40, 80],
+    104: [20, 40, 80],
+    108: [20, 40, 80],
+    112: [20, 40, 80],
+    116: [20, 40, 80],
+    120: [20, 40, 80],
+    124: [20, 40, 80],
+    128: [20, 40, 80],
+    132: [20, 40, 80],
+    136: [20, 40, 80],
+    140: [20, 40, 80],
+    144: [20, 40, 80],
+    149: [20, 40, 80],
+    153: [20, 40, 80],
+    157: [20, 40, 80],
+    161: [20, 40, 80],
+    165: [20],
+}
+
+# All universally accepted 2.4GHz channels and frequencies.
+WORLD_WIDE_2G_CHANNELS: ChannelBandwidthMap = {
+    1: [20],
+    2: [20],
+    3: [20],
+    4: [20],
+    5: [20],
+    6: [20],
+    7: [20],
+    8: [20],
+    9: [20],
+    10: [20],
+    11: [20],
+}
+
+# List of supported channels and frequencies by country.
+#
+# Please keep this alphabetically ordered. Thanks!
+#
+# TODO: Add missing countries: Russia, Israel, Korea, Turkey, South Africa,
+# Brazil, Bahrain, Vietnam
+COUNTRY_CHANNELS = {
+    "Australia": CountryChannels(
+        country_code="AU",
+        allowed_channels=WORLD_WIDE_2G_CHANNELS
+        | {
+            12: [20],
+            13: [20],
+            36: [20, 40, 80],
+            40: [20, 40, 80],
+            44: [20, 40, 80],
+            48: [20, 40, 80],
+            52: [20, 40, 80],
+            56: [20, 40, 80],
+            60: [20, 40, 80],
+            64: [20, 40, 80],
+            100: [20, 40, 80],
+            104: [20, 40, 80],
+            108: [20, 40, 80],
+            112: [20, 40, 80],
+            116: [20, 40, 80],
+            132: [20, 40, 80],
+            136: [20, 40, 80],
+            140: [20, 40, 80],
+            144: [20, 40, 80],
+            149: [20, 40, 80],
+            153: [20, 40, 80],
+            157: [20, 40, 80],
+            161: [20, 40, 80],
+            165: [20],
+        },
+    ),
+    "Austria": CountryChannels(
+        country_code="AT",
+        allowed_channels=WORLD_WIDE_2G_CHANNELS
+        | {
+            12: [20],
+            13: [20],
+            36: [20, 40, 80],
+            40: [20, 40, 80],
+            44: [20, 40, 80],
+            48: [20, 40, 80],
+            52: [20, 40, 80],
+            56: [20, 40, 80],
+            60: [20, 40, 80],
+            64: [20, 40, 80],
+            100: [20, 40, 80],
+            104: [20, 40, 80],
+            108: [20, 40, 80],
+            112: [20, 40, 80],
+            116: [20, 40, 80],
+            120: [20, 40, 80],
+            124: [20, 40, 80],
+            128: [20, 40, 80],
+            132: [20, 40, 80],
+            136: [20, 40, 80],
+            140: [20, 40, 80],
+        },
+    ),
+    "Belgium": CountryChannels(
+        country_code="BE",
+        allowed_channels=WORLD_WIDE_2G_CHANNELS
+        | {
+            12: [20],
+            13: [20],
+            36: [20, 40, 80],
+            40: [20, 40, 80],
+            44: [20, 40, 80],
+            48: [20, 40, 80],
+            52: [20, 40, 80],
+            56: [20, 40, 80],
+            60: [20, 40, 80],
+            64: [20, 40, 80],
+            100: [20, 40, 80],
+            104: [20, 40, 80],
+            108: [20, 40, 80],
+            112: [20, 40, 80],
+            116: [20, 40, 80],
+            120: [20, 40, 80],
+            124: [20, 40, 80],
+            128: [20, 40, 80],
+            132: [20, 40, 80],
+            136: [20, 40, 80],
+            140: [20, 40, 80],
+        },
+    ),
+    "Canada": CountryChannels(
+        country_code="CA",
+        allowed_channels=WORLD_WIDE_2G_CHANNELS
+        | {
+            36: [20, 40, 80],
+            40: [20, 40, 80],
+            44: [20, 40, 80],
+            48: [20, 40, 80],
+            52: [20, 40, 80],
+            56: [20, 40, 80],
+            60: [20, 40, 80],
+            64: [20, 40, 80],
+            100: [20, 40, 80],
+            104: [20, 40, 80],
+            108: [20, 40, 80],
+            112: [20, 40, 80],
+            116: [20, 40, 80],
+            132: [20, 40, 80],
+            136: [20, 40, 80],
+            140: [20, 40, 80],
+            144: [20, 40, 80],
+            149: [20, 40, 80],
+            153: [20, 40, 80],
+            157: [20, 40, 80],
+            161: [20, 40, 80],
+            165: [20],
+        },
+    ),
+    "China": CountryChannels(
+        country_code="CH",
+        allowed_channels=WORLD_WIDE_2G_CHANNELS
+        | {
+            12: [20],
+            13: [20],
+            36: [20, 40, 80],
+            40: [20, 40, 80],
+            44: [20, 40, 80],
+            48: [20, 40, 80],
+            52: [20, 40, 80],
+            56: [20, 40, 80],
+            60: [20, 40, 80],
+            64: [20, 40, 80],
+            100: [20, 40, 80],
+            104: [20, 40, 80],
+            108: [20, 40, 80],
+            112: [20, 40, 80],
+            116: [20, 40, 80],
+            120: [20, 40, 80],
+            124: [20, 40, 80],
+            128: [20, 40, 80],
+            132: [20, 40, 80],
+            136: [20, 40, 80],
+            140: [20, 40, 80],
+        },
+    ),
+    "Denmark": CountryChannels(
+        country_code="DK",
+        allowed_channels=WORLD_WIDE_2G_CHANNELS
+        | {
+            12: [20],
+            13: [20],
+            36: [20, 40, 80],
+            40: [20, 40, 80],
+            44: [20, 40, 80],
+            48: [20, 40, 80],
+            52: [20, 40, 80],
+            56: [20, 40, 80],
+            60: [20, 40, 80],
+            64: [20, 40, 80],
+            100: [20, 40, 80],
+            104: [20, 40, 80],
+            108: [20, 40, 80],
+            112: [20, 40, 80],
+            116: [20, 40, 80],
+            120: [20, 40, 80],
+            124: [20, 40, 80],
+            128: [20, 40, 80],
+            132: [20, 40, 80],
+            136: [20, 40, 80],
+            140: [20, 40, 80],
+        },
+    ),
+    "France": CountryChannels(
+        country_code="FR",
+        allowed_channels=WORLD_WIDE_2G_CHANNELS
+        | {
+            12: [20],
+            13: [20],
+            36: [20, 40, 80],
+            40: [20, 40, 80],
+            44: [20, 40, 80],
+            48: [20, 40, 80],
+            52: [20, 40, 80],
+            56: [20, 40, 80],
+            60: [20, 40, 80],
+            64: [20, 40, 80],
+            100: [20, 40, 80],
+            104: [20, 40, 80],
+            108: [20, 40, 80],
+            112: [20, 40, 80],
+            116: [20, 40, 80],
+            120: [20, 40, 80],
+            124: [20, 40, 80],
+            128: [20, 40, 80],
+            132: [20, 40, 80],
+            136: [20, 40, 80],
+            140: [20, 40, 80],
+        },
+    ),
+    "Germany": CountryChannels(
+        country_code="DE",
+        allowed_channels=WORLD_WIDE_2G_CHANNELS
+        | {
+            12: [20],
+            13: [20],
+            36: [20, 40, 80],
+            40: [20, 40, 80],
+            44: [20, 40, 80],
+            48: [20, 40, 80],
+            52: [20, 40, 80],
+            56: [20, 40, 80],
+            60: [20, 40, 80],
+            64: [20, 40, 80],
+            100: [20, 40, 80],
+            104: [20, 40, 80],
+            108: [20, 40, 80],
+            112: [20, 40, 80],
+            116: [20, 40, 80],
+            120: [20, 40, 80],
+            124: [20, 40, 80],
+            128: [20, 40, 80],
+            132: [20, 40, 80],
+            136: [20, 40, 80],
+            140: [20, 40, 80],
+        },
+    ),
+    "India": CountryChannels(
+        country_code="IN",
+        allowed_channels=WORLD_WIDE_2G_CHANNELS
+        | {
+            12: [20],
+            13: [20],
+            36: [20, 40, 80],
+            40: [20, 40, 80],
+            44: [20, 40, 80],
+            48: [20, 40, 80],
+            52: [20, 40, 80],
+            56: [20, 40, 80],
+            60: [20, 40, 80],
+            64: [20, 40, 80],
+            100: [20, 40, 80],
+            104: [20, 40, 80],
+            108: [20, 40, 80],
+            112: [20, 40, 80],
+            116: [20, 40, 80],
+            120: [20, 40, 80],
+            124: [20, 40, 80],
+            128: [20, 40, 80],
+            132: [20, 40, 80],
+            136: [20, 40, 80],
+            140: [20, 40, 80],
+            144: [20, 40, 80],
+            149: [20, 40, 80],
+            153: [20, 40, 80],
+            157: [20, 40, 80],
+            161: [20, 40, 80],
+            165: [20],
+        },
+    ),
+    "Ireland": CountryChannels(
+        country_code="IE",
+        allowed_channels=WORLD_WIDE_2G_CHANNELS
+        | {
+            12: [20],
+            13: [20],
+            36: [20, 40, 80],
+            40: [20, 40, 80],
+            44: [20, 40, 80],
+            48: [20, 40, 80],
+            52: [20, 40, 80],
+            56: [20, 40, 80],
+            60: [20, 40, 80],
+            64: [20, 40, 80],
+            100: [20, 40, 80],
+            104: [20, 40, 80],
+            108: [20, 40, 80],
+            112: [20, 40, 80],
+            116: [20, 40, 80],
+            120: [20, 40, 80],
+            124: [20, 40, 80],
+            128: [20, 40, 80],
+            132: [20, 40, 80],
+            136: [20, 40, 80],
+            140: [20, 40, 80],
+        },
+    ),
+    "Italy": CountryChannels(
+        country_code="IT",
+        allowed_channels=WORLD_WIDE_2G_CHANNELS
+        | {
+            12: [20],
+            13: [20],
+            36: [20, 40, 80],
+            40: [20, 40, 80],
+            44: [20, 40, 80],
+            48: [20, 40, 80],
+            52: [20, 40, 80],
+            56: [20, 40, 80],
+            60: [20, 40, 80],
+            64: [20, 40, 80],
+            100: [20, 40, 80],
+            104: [20, 40, 80],
+            108: [20, 40, 80],
+            112: [20, 40, 80],
+            116: [20, 40, 80],
+            120: [20, 40, 80],
+            124: [20, 40, 80],
+            128: [20, 40, 80],
+            132: [20, 40, 80],
+            136: [20, 40, 80],
+            140: [20, 40, 80],
+        },
+    ),
+    "Japan": CountryChannels(
+        country_code="JP",
+        allowed_channels=WORLD_WIDE_2G_CHANNELS
+        | {
+            12: [20],
+            13: [20],
+            36: [20, 40, 80],
+            40: [20, 40, 80],
+            44: [20, 40, 80],
+            48: [20, 40, 80],
+            52: [20, 40, 80],
+            56: [20, 40, 80],
+            60: [20, 40, 80],
+            64: [20, 40, 80],
+            100: [20, 40, 80],
+            104: [20, 40, 80],
+            108: [20, 40, 80],
+            112: [20, 40, 80],
+            116: [20, 40, 80],
+            120: [20, 40, 80],
+            124: [20, 40, 80],
+            128: [20, 40, 80],
+            132: [20, 40, 80],
+            136: [20, 40, 80],
+            140: [20, 40, 80],
+            144: [20, 40, 80],
+        },
+    ),
+    "Mexico": CountryChannels(
+        country_code="MX",
+        allowed_channels=WORLD_WIDE_2G_CHANNELS
+        | {
+            36: [20, 40, 80],
+            40: [20, 40, 80],
+            44: [20, 40, 80],
+            48: [20, 40, 80],
+            52: [20, 40, 80],
+            56: [20, 40, 80],
+            60: [20, 40, 80],
+            64: [20, 40, 80],
+            100: [20, 40, 80],
+            104: [20, 40, 80],
+            108: [20, 40, 80],
+            112: [20, 40, 80],
+            116: [20, 40, 80],
+            132: [20, 40, 80],
+            136: [20, 40, 80],
+            140: [20, 40, 80],
+            144: [20, 40, 80],
+            149: [20, 40, 80],
+            153: [20, 40, 80],
+            157: [20, 40, 80],
+            161: [20, 40, 80],
+            165: [20],
+        },
+    ),
+    "Netherlands": CountryChannels(
+        country_code="NL",
+        allowed_channels=WORLD_WIDE_2G_CHANNELS
+        | {
+            12: [20],
+            13: [20],
+            36: [20, 40, 80],
+            40: [20, 40, 80],
+            44: [20, 40, 80],
+            48: [20, 40, 80],
+            52: [20, 40, 80],
+            56: [20, 40, 80],
+            60: [20, 40, 80],
+            64: [20, 40, 80],
+            100: [20, 40, 80],
+            104: [20, 40, 80],
+            108: [20, 40, 80],
+            112: [20, 40, 80],
+            116: [20, 40, 80],
+            120: [20, 40, 80],
+            124: [20, 40, 80],
+            128: [20, 40, 80],
+            132: [20, 40, 80],
+            136: [20, 40, 80],
+            140: [20, 40, 80],
+        },
+    ),
+    "New Zealand": CountryChannels(
+        country_code="NZ",
+        allowed_channels=WORLD_WIDE_2G_CHANNELS
+        | {
+            12: [20],
+            13: [20],
+            36: [20, 40, 80],
+            40: [20, 40, 80],
+            44: [20, 40, 80],
+            48: [20, 40, 80],
+            52: [20, 40, 80],
+            56: [20, 40, 80],
+            60: [20, 40, 80],
+            64: [20, 40, 80],
+            100: [20, 40, 80],
+            104: [20, 40, 80],
+            108: [20, 40, 80],
+            112: [20, 40, 80],
+            116: [20, 40, 80],
+            120: [20, 40, 80],
+            124: [20, 40, 80],
+            128: [20, 40, 80],
+            132: [20, 40, 80],
+            136: [20, 40, 80],
+            140: [20, 40, 80],
+            144: [20, 40, 80],
+            149: [20, 40, 80],
+            153: [20, 40, 80],
+            157: [20, 40, 80],
+            161: [20, 40, 80],
+            165: [20],
+        },
+    ),
+    "Norway": CountryChannels(
+        country_code="NO",
+        allowed_channels=WORLD_WIDE_2G_CHANNELS
+        | {
+            12: [20],
+            13: [20],
+            36: [20, 40, 80],
+            40: [20, 40, 80],
+            44: [20, 40, 80],
+            48: [20, 40, 80],
+            52: [20, 40, 80],
+            56: [20, 40, 80],
+            60: [20, 40, 80],
+            64: [20, 40, 80],
+            100: [20, 40, 80],
+            104: [20, 40, 80],
+            108: [20, 40, 80],
+            112: [20, 40, 80],
+            116: [20, 40, 80],
+            120: [20, 40, 80],
+            124: [20, 40, 80],
+            128: [20, 40, 80],
+            132: [20, 40, 80],
+            136: [20, 40, 80],
+            140: [20, 40, 80],
+        },
+    ),
+    "Singapore": CountryChannels(
+        country_code="SG",
+        allowed_channels=WORLD_WIDE_2G_CHANNELS
+        | {
+            12: [20],
+            13: [20],
+            36: [20, 40, 80],
+            40: [20, 40, 80],
+            44: [20, 40, 80],
+            48: [20, 40, 80],
+            52: [20, 40, 80],
+            56: [20, 40, 80],
+            60: [20, 40, 80],
+            64: [20, 40, 80],
+            100: [20, 40, 80],
+            104: [20, 40, 80],
+            108: [20, 40, 80],
+            112: [20, 40, 80],
+            116: [20, 40, 80],
+            120: [20, 40, 80],
+            124: [20, 40, 80],
+            128: [20, 40, 80],
+            132: [20, 40, 80],
+            136: [20, 40, 80],
+            140: [20, 40, 80],
+            144: [20, 40, 80],
+            149: [20, 40, 80],
+            153: [20, 40, 80],
+            157: [20, 40, 80],
+            161: [20, 40, 80],
+            165: [20],
+        },
+    ),
+    "Spain": CountryChannels(
+        country_code="ES",
+        allowed_channels=WORLD_WIDE_2G_CHANNELS
+        | {
+            12: [20],
+            13: [20],
+            36: [20, 40, 80],
+            40: [20, 40, 80],
+            44: [20, 40, 80],
+            48: [20, 40, 80],
+            52: [20, 40, 80],
+            56: [20, 40, 80],
+            60: [20, 40, 80],
+            64: [20, 40, 80],
+            100: [20, 40, 80],
+            104: [20, 40, 80],
+            108: [20, 40, 80],
+            112: [20, 40, 80],
+            116: [20, 40, 80],
+            120: [20, 40, 80],
+            124: [20, 40, 80],
+            128: [20, 40, 80],
+            132: [20, 40, 80],
+            136: [20, 40, 80],
+            140: [20, 40, 80],
+        },
+    ),
+    "Sweden": CountryChannels(
+        country_code="SE",
+        allowed_channels=WORLD_WIDE_2G_CHANNELS
+        | {
+            12: [20],
+            13: [20],
+            36: [20, 40, 80],
+            40: [20, 40, 80],
+            44: [20, 40, 80],
+            48: [20, 40, 80],
+            52: [20, 40, 80],
+            56: [20, 40, 80],
+            60: [20, 40, 80],
+            64: [20, 40, 80],
+            100: [20, 40, 80],
+            104: [20, 40, 80],
+            108: [20, 40, 80],
+            112: [20, 40, 80],
+            116: [20, 40, 80],
+            120: [20, 40, 80],
+            124: [20, 40, 80],
+            128: [20, 40, 80],
+            132: [20, 40, 80],
+            136: [20, 40, 80],
+            140: [20, 40, 80],
+        },
+    ),
+    "Taiwan": CountryChannels(
+        country_code="TW",
+        allowed_channels=WORLD_WIDE_2G_CHANNELS
+        | {
+            12: [20],
+            13: [20],
+            36: [20, 40, 80],
+            40: [20, 40, 80],
+            44: [20, 40, 80],
+            48: [20, 40, 80],
+            52: [20, 40, 80],
+            56: [20, 40, 80],
+            60: [20, 40, 80],
+            64: [20, 40, 80],
+            100: [20, 40, 80],
+            104: [20, 40, 80],
+            108: [20, 40, 80],
+            112: [20, 40, 80],
+            116: [20, 40, 80],
+            120: [20, 40, 80],
+            124: [20, 40, 80],
+            128: [20, 40, 80],
+            132: [20, 40, 80],
+            136: [20, 40, 80],
+            140: [20, 40, 80],
+            144: [20, 40, 80],
+            149: [20, 40, 80],
+            153: [20, 40, 80],
+            157: [20, 40, 80],
+            161: [20, 40, 80],
+            165: [20],
+        },
+    ),
+    "United Kingdom of Great Britain": CountryChannels(
+        country_code="GB",
+        allowed_channels=WORLD_WIDE_2G_CHANNELS
+        | {
+            11: [20],
+            12: [20],
+            13: [20],
+            36: [20, 40, 80],
+            40: [20, 40, 80],
+            44: [20, 40, 80],
+            48: [20, 40, 80],
+            52: [20, 40, 80],
+            56: [20, 40, 80],
+            60: [20, 40, 80],
+            64: [20, 40, 80],
+            100: [20, 40, 80],
+            104: [20, 40, 80],
+            108: [20, 40, 80],
+            112: [20, 40, 80],
+            116: [20, 40, 80],
+            120: [20, 40, 80],
+            124: [20, 40, 80],
+            128: [20, 40, 80],
+            132: [20, 40, 80],
+            136: [20, 40, 80],
+            140: [20, 40, 80],
+        },
+    ),
+    "United States of America": CountryChannels(
+        country_code="US",
+        allowed_channels=WORLD_WIDE_2G_CHANNELS
+        | {
+            36: [20, 40, 80],
+            40: [20, 40, 80],
+            44: [20, 40, 80],
+            48: [20, 40, 80],
+            52: [20, 40, 80],
+            56: [20, 40, 80],
+            60: [20, 40, 80],
+            64: [20, 40, 80],
+            100: [20, 40, 80],
+            104: [20, 40, 80],
+            108: [20, 40, 80],
+            112: [20, 40, 80],
+            116: [20, 40, 80],
+            120: [20, 40, 80],
+            124: [20, 40, 80],
+            128: [20, 40, 80],
+            132: [20, 40, 80],
+            136: [20, 40, 80],
+            140: [20, 40, 80],
+            144: [20, 40, 80],
+            149: [20, 40, 80],
+            153: [20, 40, 80],
+            157: [20, 40, 80],
+            161: [20, 40, 80],
+            165: [20],
+        },
+    ),
+}
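+
+
+# A small lookup sketch appended for illustration only (not part of the
+# original change), showing how the tables above might be queried in a
+# regulatory test: given a country name, channel, and bandwidth, report
+# whether the combination is allowed.
+def _is_channel_allowed(country: str, channel: Channel, bandwidth: Bandwidth) -> bool:
+    """Returns True if `channel` at `bandwidth` MHz is allowed in `country`."""
+    country_channels = COUNTRY_CHANNELS.get(country)
+    if country_channels is None:
+        return False
+    return bandwidth in country_channels.allowed_channels.get(channel, [])
+
+
+if __name__ == "__main__":
+    # Channel 36 at 80 MHz is allowed in the US table above, while channel 165
+    # is limited to 20 MHz there.
+    assert _is_channel_allowed("United States of America", 36, 80)
+    assert not _is_channel_allowed("United States of America", 165, 40)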
diff --git a/src/antlion/controllers/ap_lib/third_party_ap_profiles/__init__.py b/packages/antlion/controllers/ap_lib/third_party_ap_profiles/__init__.py
similarity index 100%
rename from src/antlion/controllers/ap_lib/third_party_ap_profiles/__init__.py
rename to packages/antlion/controllers/ap_lib/third_party_ap_profiles/__init__.py
diff --git a/packages/antlion/controllers/ap_lib/third_party_ap_profiles/actiontec.py b/packages/antlion/controllers/ap_lib/third_party_ap_profiles/actiontec.py
new file mode 100644
index 0000000..f04f60b
--- /dev/null
+++ b/packages/antlion/controllers/ap_lib/third_party_ap_profiles/actiontec.py
@@ -0,0 +1,150 @@
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+from antlion.controllers.ap_lib import hostapd_config, hostapd_constants, hostapd_utils
+from antlion.controllers.ap_lib.hostapd_security import Security, SecurityMode
+
+
+def actiontec_pk5000(
+    iface_wlan_2g: str, channel: int, security: Security, ssid: str | None = None
+) -> hostapd_config.HostapdConfig:
+    """A simulated implementation of what a Actiontec PK5000 AP
+    Args:
+        iface_wlan_2g: The 2.4 interface of the test AP.
+        channel: What channel to use.  Only 2.4Ghz is supported for this profile
+        security: A security profile.  Must be open or WPA2 as this is what is
+            supported by the PK5000.
+        ssid: Network name
+    Returns:
+        A hostapd config
+
+    Differences from real PK5000:
+        Supported Rates IE:
+            PK5000: Supported: 1, 2, 5.5, 11
+                    Extended: 6, 9, 12, 18, 24, 36, 48, 54
+            Simulated: Supported: 1, 2, 5.5, 11, 6, 9, 12, 18
+                       Extended: 24, 36, 48, 54
+    """
+    if channel > 11:
+        # Technically this should be 14 but since the PK5000 is a US only AP,
+        # 11 is the highest allowable channel.
+        raise ValueError(
+            f"The Actiontec PK5000 does not support 5Ghz. Invalid channel ({channel})"
+        )
+    # Verify interface and security
+    hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST)
+    hostapd_utils.verify_security_mode(security, [SecurityMode.OPEN, SecurityMode.WPA2])
+    if security.security_mode is not SecurityMode.OPEN:
+        hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER])
+
+    interface = iface_wlan_2g
+    short_preamble = False
+    force_wmm = False
+    beacon_interval = 100
+    dtim_period = 3
+    # Sets the basic rates and supported rates of the PK5000
+    additional_params = (
+        hostapd_constants.CCK_AND_OFDM_BASIC_RATES
+        | hostapd_constants.CCK_AND_OFDM_DATA_RATES
+    )
+
+    config = hostapd_config.HostapdConfig(
+        ssid=ssid,
+        channel=channel,
+        hidden=False,
+        security=security,
+        interface=interface,
+        mode=hostapd_constants.MODE_11G,
+        force_wmm=force_wmm,
+        beacon_interval=beacon_interval,
+        dtim_period=dtim_period,
+        short_preamble=short_preamble,
+        additional_parameters=additional_params,
+    )
+
+    return config
+
+
+def actiontec_mi424wr(
+    iface_wlan_2g: str, channel: int, security: Security, ssid: str | None = None
+) -> hostapd_config.HostapdConfig:
+    # TODO(b/143104825): Permit RIFS once it is supported
+    """A simulated implementation of an Actiontec MI424WR AP.
+    Args:
+        iface_wlan_2g: The 2.4Ghz interface of the test AP.
+        channel: What channel to use (2.4Ghz or 5Ghz).
+        security: A security profile.
+        ssid: The network name.
+    Returns:
+        A hostapd config.
+
+    Differences from real MI424WR:
+        HT Capabilities:
+            MI424WR:
+                HT Rx STBC: Support for 1, 2, and 3
+            Simulated:
+                HT Rx STBC: Support for 1
+        HT Information:
+            MI424WR:
+                RIFS: Permitted
+            Simulated:
+                RIFS: Prohibited
+    """
+    if channel > 11:
+        raise ValueError(
+            f"The Actiontec MI424WR does not support 5Ghz. Invalid channel ({channel})"
+        )
+    # Verify interface and security
+    hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST)
+    hostapd_utils.verify_security_mode(security, [SecurityMode.OPEN, SecurityMode.WPA2])
+    if security.security_mode is not SecurityMode.OPEN:
+        hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER])
+
+    n_capabilities = [
+        hostapd_constants.N_CAPABILITY_TX_STBC,
+        hostapd_constants.N_CAPABILITY_DSSS_CCK_40,
+        hostapd_constants.N_CAPABILITY_RX_STBC1,
+    ]
+    rates = (
+        hostapd_constants.CCK_AND_OFDM_DATA_RATES
+        | hostapd_constants.CCK_AND_OFDM_BASIC_RATES
+    )
+    # Proprietary Atheros Communication: Adv Capability IE
+    # Proprietary Atheros Communication: Unknown IE
+    # Country Info: US Only IE
+    vendor_elements = {
+        "vendor_elements": "dd0900037f01010000ff7f"
+        "dd0a00037f04010000000000"
+        "0706555320010b1b"
+    }
+
+    additional_params = rates | vendor_elements
+
+    config = hostapd_config.HostapdConfig(
+        ssid=ssid,
+        channel=channel,
+        hidden=False,
+        security=security,
+        interface=iface_wlan_2g,
+        mode=hostapd_constants.MODE_11N_MIXED,
+        force_wmm=True,
+        beacon_interval=100,
+        dtim_period=1,
+        short_preamble=True,
+        n_capabilities=n_capabilities,
+        additional_parameters=additional_params,
+    )
+
+    return config
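+
+
+# A usage sketch appended for illustration only; it is not part of the
+# original change. "wlan0" is an illustrative 2.4Ghz interface name and
+# Security() is assumed to default to an open network; substitute whatever
+# your test bed actually uses.
+if __name__ == "__main__":
+    open_security = Security()  # assumed open by default
+    config = actiontec_pk5000(
+        iface_wlan_2g="wlan0",
+        channel=6,
+        security=open_security,
+        ssid="example_ssid",
+    )
+    # The resulting HostapdConfig is consumed the same way as any other
+    # profile in this package, e.g. by an access point controller.
+    print(config)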
diff --git a/packages/antlion/controllers/ap_lib/third_party_ap_profiles/asus.py b/packages/antlion/controllers/ap_lib/third_party_ap_profiles/asus.py
new file mode 100644
index 0000000..6a9ae27
--- /dev/null
+++ b/packages/antlion/controllers/ap_lib/third_party_ap_profiles/asus.py
@@ -0,0 +1,554 @@
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+from antlion.controllers.ap_lib import hostapd_config, hostapd_constants, hostapd_utils
+from antlion.controllers.ap_lib.hostapd_security import Security, SecurityMode
+
+
+def asus_rtac66u(
+    iface_wlan_2g: str,
+    iface_wlan_5g: str,
+    channel: int,
+    security: Security,
+    ssid: str | None = None,
+) -> hostapd_config.HostapdConfig:
+    # TODO(b/143104825): Permit RIFS once it is supported
+    # TODO(b/144446076): Address non-whirlwind hardware capabilities.
+    """A simulated implementation of an Asus RTAC66U AP.
+    Args:
+        iface_wlan_2g: The 2.4Ghz interface of the test AP.
+        iface_wlan_5g: The 5Ghz interface of the test AP.
+        channel: What channel to use.
+        security: A security profile.  Must be open or WPA2 as this is what is
+            supported by the RTAC66U.
+        ssid: Network name
+    Returns:
+        A hostapd config
+    Differences from real RTAC66U:
+        2.4 GHz:
+            Rates:
+                RTAC66U:
+                    Supported: 1, 2, 5.5, 11, 18, 24, 36, 54
+                    Extended: 6, 9, 12, 48
+                Simulated:
+                    Supported: 1, 2, 5.5, 11, 6, 9, 12, 18
+                    Extended: 24, 36, 48, 54
+            HT Capab:
+                Info:
+                    RTAC66U: Green Field supported
+                    Simulated: Green Field not supported on Whirlwind.
+        5GHz:
+            VHT Capab:
+                RTAC66U:
+                    SU Beamformer Supported,
+                    SU Beamformee Supported,
+                    Beamformee STS Capability: 3,
+                    Number of Sounding Dimensions: 3,
+                    VHT Link Adaptation: Both
+                Simulated:
+                    Above are not supported on Whirlwind.
+            VHT Operation Info:
+                RTAC66U: Basic MCS Map (0x0000)
+                Simulated: Basic MCS Map (0xfffc)
+            VHT Tx Power Envelope:
+                RTAC66U: Local Max Tx Pwr Constraint: 1.0 dBm
+                Simulated: Local Max Tx Pwr Constraint: 23.0 dBm
+        Both:
+            HT Capab:
+                A-MPDU
+                    RTAC66U: MPDU Density 4
+                    Simulated: MPDU Density 8
+            HT Info:
+                RTAC66U: RIFS Permitted
+                Simulated: RIFS Prohibited
+    """
+    # Verify interface and security
+    hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST)
+    hostapd_utils.verify_interface(iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST)
+    hostapd_utils.verify_security_mode(security, [SecurityMode.OPEN, SecurityMode.WPA2])
+    if security.security_mode is not SecurityMode.OPEN:
+        hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER])
+
+    # Common Parameters
+    rates = hostapd_constants.CCK_AND_OFDM_DATA_RATES
+    vht_channel_width = 20
+    n_capabilities = [
+        hostapd_constants.N_CAPABILITY_LDPC,
+        hostapd_constants.N_CAPABILITY_TX_STBC,
+        hostapd_constants.N_CAPABILITY_RX_STBC1,
+        hostapd_constants.N_CAPABILITY_MAX_AMSDU_7935,
+        hostapd_constants.N_CAPABILITY_DSSS_CCK_40,
+        hostapd_constants.N_CAPABILITY_SGI20,
+    ]
+    # WPS IE
+    # Broadcom IE
+    vendor_elements = {
+        "vendor_elements": "dd310050f204104a00011010440001021047001093689729d373c26cb1563c6c570f33"
+        "d7103c0001031049000600372a000120"
+        "dd090010180200001c0000"
+    }
+
+    # 2.4GHz
+    if channel <= 11:
+        interface = iface_wlan_2g
+        rates.update(hostapd_constants.CCK_AND_OFDM_BASIC_RATES)
+        mode = hostapd_constants.MODE_11N_MIXED
+        ac_capabilities = None
+
+    # 5GHz
+    else:
+        interface = iface_wlan_5g
+        rates.update(hostapd_constants.OFDM_ONLY_BASIC_RATES)
+        mode = hostapd_constants.MODE_11AC_MIXED
+        ac_capabilities = [
+            hostapd_constants.AC_CAPABILITY_RXLDPC,
+            hostapd_constants.AC_CAPABILITY_SHORT_GI_80,
+            hostapd_constants.AC_CAPABILITY_TX_STBC_2BY1,
+            hostapd_constants.AC_CAPABILITY_RX_STBC_1,
+            hostapd_constants.AC_CAPABILITY_MAX_MPDU_11454,
+            hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7,
+        ]
+
+    additional_params = rates | vendor_elements | hostapd_constants.UAPSD_ENABLED
+
+    config = hostapd_config.HostapdConfig(
+        ssid=ssid,
+        channel=channel,
+        hidden=False,
+        security=security,
+        interface=interface,
+        mode=mode,
+        force_wmm=True,
+        beacon_interval=100,
+        dtim_period=3,
+        short_preamble=False,
+        n_capabilities=n_capabilities,
+        ac_capabilities=ac_capabilities,
+        vht_channel_width=vht_channel_width,
+        additional_parameters=additional_params,
+    )
+
+    return config
+
+
+def asus_rtac86u(
+    iface_wlan_2g: str,
+    iface_wlan_5g: str,
+    channel: int,
+    security: Security,
+    ssid: str | None = None,
+) -> hostapd_config.HostapdConfig:
+    """A simulated implementation of an Asus RTAC86U AP.
+    Args:
+        iface_wlan_2g: The 2.4Ghz interface of the test AP.
+        iface_wlan_5g: The 5Ghz interface of the test AP.
+        channel: What channel to use.
+        security: A security profile.  Must be open or WPA2 as this is what is
+            supported by the RTAC86U.
+        ssid: Network name
+    Returns:
+        A hostapd config
+    Differences from real RTAC86U:
+        2.4GHz:
+            Rates:
+                RTAC86U:
+                    Supported: 1, 2, 5.5, 11, 18, 24, 36, 54
+                    Extended: 6, 9, 12, 48
+                Simulated:
+                    Supported: 1, 2, 5.5, 11, 6, 9, 12, 18
+                    Extended: 24, 36, 48, 54
+        5GHz:
+            Country Code:
+                Simulated: Has two country code IEs, one that matches the
+                actual AP and another explicit IE that was required for
+                hostapd's 802.11d to work.
+        Both:
+            RSN Capabilities (w/ WPA2):
+                RTAC86U:
+                    RSN PTKSA Replay Counter Capab: 16
+                Simulated:
+                    RSN PTKSA Replay Counter Capab: 1
+    """
+    # Verify interface and security
+    hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST)
+    hostapd_utils.verify_interface(iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST)
+    hostapd_utils.verify_security_mode(security, [SecurityMode.OPEN, SecurityMode.WPA2])
+    if security.security_mode is not SecurityMode.OPEN:
+        hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER])
+
+    # Common Parameters
+    rates = hostapd_constants.CCK_AND_OFDM_DATA_RATES
+    qbss = {"bss_load_update_period": 50, "chan_util_avg_period": 600}
+
+    # 2.4GHz
+    if channel <= 11:
+        interface = iface_wlan_2g
+        mode = hostapd_constants.MODE_11G
+        rates.update(hostapd_constants.CCK_AND_OFDM_BASIC_RATES)
+        spectrum_mgmt = False
+        # Measurement Pilot Transmission IE
+        vendor_elements = {"vendor_elements": "42020000"}
+
+    # 5GHz
+    else:
+        interface = iface_wlan_5g
+        mode = hostapd_constants.MODE_11A
+        rates.update(hostapd_constants.OFDM_ONLY_BASIC_RATES)
+        spectrum_mgmt = True
+        # Country Information IE (w/ individual channel info)
+        # TPC Report Transmit Power IE
+        # Measurement Pilot Transmission IE
+        vendor_elements = {
+            "vendor_elements": "074255532024011e28011e2c011e30011e34011e38011e3c011e40011e64011e"
+            "68011e6c011e70011e74011e84011e88011e8c011e95011e99011e9d011ea1011e"
+            "a5011e"
+            "23021300"
+            "42020000"
+        }
+
+    additional_params = rates | qbss | vendor_elements
+
+    config = hostapd_config.HostapdConfig(
+        ssid=ssid,
+        channel=channel,
+        hidden=False,
+        security=security,
+        interface=interface,
+        mode=mode,
+        force_wmm=False,
+        beacon_interval=100,
+        dtim_period=3,
+        short_preamble=False,
+        spectrum_mgmt_required=spectrum_mgmt,
+        additional_parameters=additional_params,
+    )
+    return config
+
+
+def asus_rtac5300(
+    iface_wlan_2g: str,
+    iface_wlan_5g: str,
+    channel: int,
+    security: Security,
+    ssid: str | None = None,
+) -> hostapd_config.HostapdConfig:
+    # TODO(b/143104825): Permit RIFS once it is supported
+    # TODO(b/144446076): Address non-whirlwind hardware capabilities.
+    """A simulated implementation of an Asus RTAC5300 AP.
+    Args:
+        iface_wlan_2g: The 2.4Ghz interface of the test AP.
+        iface_wlan_5g: The 5Ghz interface of the test AP.
+        channel: What channel to use.
+        security: A security profile.  Must be open or WPA2 as this is what is
+            supported by the RTAC5300.
+        ssid: Network name
+    Returns:
+        A hostapd config
+    Differences from real RTAC5300:
+        2.4GHz:
+            Rates:
+                RTAC5300:
+                    Supported: 1, 2, 5.5, 11, 18, 24, 36, 54
+                    Extended: 6, 9, 12, 48
+                Simulated:
+                    Supported: 1, 2, 5.5, 11, 6, 9, 12, 18
+                    Extended: 24, 36, 48, 54
+        5GHz:
+            VHT Capab:
+                RTAC5300:
+                    SU Beamformer Supported,
+                    SU Beamformee Supported,
+                    Beamformee STS Capability: 4,
+                    Number of Sounding Dimensions: 4,
+                    MU Beamformer Supported,
+                    VHT Link Adaptation: Both
+                Simulated:
+                    Above are not supported on Whirlwind.
+            VHT Operation Info:
+                RTAC5300: Basic MCS Map (0x0000)
+                Simulated: Basic MCS Map (0xfffc)
+            VHT Tx Power Envelope:
+                RTAC5300: Local Max Tx Pwr Constraint: 1.0 dBm
+                Simulated: Local Max Tx Pwr Constraint: 23.0 dBm
+        Both:
+            HT Capab:
+                A-MPDU
+                    RTAC5300: MPDU Density 4
+                    Simulated: MPDU Density 8
+            HT Info:
+                RTAC5300: RIFS Permitted
+                Simulated: RIFS Prohibited
+    """
+    # Verify interface and security
+    hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST)
+    hostapd_utils.verify_interface(iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST)
+    hostapd_utils.verify_security_mode(security, [SecurityMode.OPEN, SecurityMode.WPA2])
+    if security.security_mode is not SecurityMode.OPEN:
+        hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER])
+
+    # Common Parameters
+    rates = hostapd_constants.CCK_AND_OFDM_DATA_RATES
+    vht_channel_width = 20
+    qbss = {"bss_load_update_period": 50, "chan_util_avg_period": 600}
+    n_capabilities = [
+        hostapd_constants.N_CAPABILITY_LDPC,
+        hostapd_constants.N_CAPABILITY_TX_STBC,
+        hostapd_constants.N_CAPABILITY_RX_STBC1,
+        hostapd_constants.N_CAPABILITY_SGI20,
+    ]
+
+    # Broadcom IE
+    vendor_elements = {"vendor_elements": "dd090010180200009c0000"}
+
+    # 2.4GHz
+    if channel <= 11:
+        interface = iface_wlan_2g
+        rates.update(hostapd_constants.CCK_AND_OFDM_BASIC_RATES)
+        mode = hostapd_constants.MODE_11N_MIXED
+        # AsusTek IE
+        # Epigram 2.4GHz IE
+        vendor_elements["vendor_elements"] += (
+            "dd25f832e4010101020100031411b5"
+            "2fd437509c30b3d7f5cf5754fb125aed3b8507045aed3b85"
+            "dd1e00904c0418bf0cb2798b0faaff0000aaff0000c0050001000000c3020002"
+        )
+        ac_capabilities = None
+
+    # 5GHz
+    else:
+        interface = iface_wlan_5g
+        rates.update(hostapd_constants.OFDM_ONLY_BASIC_RATES)
+        mode = hostapd_constants.MODE_11AC_MIXED
+        # Epigram 5GHz IE
+        vendor_elements["vendor_elements"] += "dd0500904c0410"
+        ac_capabilities = [
+            hostapd_constants.AC_CAPABILITY_RXLDPC,
+            hostapd_constants.AC_CAPABILITY_SHORT_GI_80,
+            hostapd_constants.AC_CAPABILITY_TX_STBC_2BY1,
+            hostapd_constants.AC_CAPABILITY_RX_STBC_1,
+            hostapd_constants.AC_CAPABILITY_MAX_MPDU_11454,
+            hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7,
+        ]
+
+    additional_params = rates | qbss | vendor_elements | hostapd_constants.UAPSD_ENABLED
+
+    config = hostapd_config.HostapdConfig(
+        ssid=ssid,
+        channel=channel,
+        hidden=False,
+        security=security,
+        interface=interface,
+        mode=mode,
+        force_wmm=True,
+        beacon_interval=100,
+        dtim_period=3,
+        short_preamble=False,
+        n_capabilities=n_capabilities,
+        ac_capabilities=ac_capabilities,
+        vht_channel_width=vht_channel_width,
+        additional_parameters=additional_params,
+    )
+    return config
+
+
+def asus_rtn56u(
+    iface_wlan_2g: str,
+    iface_wlan_5g: str,
+    channel: int,
+    security: Security,
+    ssid: str | None = None,
+) -> hostapd_config.HostapdConfig:
+    """A simulated implementation of an Asus RTN56U AP.
+    Args:
+        iface_wlan_2g: The 2.4Ghz interface of the test AP.
+        iface_wlan_5g: The 5Ghz interface of the test AP.
+        channel: What channel to use.
+        security: A security profile.  Must be open or WPA2 as this is what is
+            supported by the RTN56U.
+        ssid: Network name
+    Returns:
+        A hostapd config
+    Differences from real RTN56U:
+        2.4GHz:
+            Rates:
+                RTN56U:
+                    Supported: 1, 2, 5.5, 11, 18, 24, 36, 54
+                    Extended: 6, 9, 12, 48
+                Simulated:
+                    Supported: 1, 2, 5.5, 11, 6, 9, 12, 18
+                    Extended: 24, 36, 48, 54
+        Both:
+            Fixed Parameters:
+                RTN56U: APSD Implemented
+                Simulated: APSD Not Implemented
+            HT Capab:
+                A-MPDU
+                    RTN56U: MPDU Density 4
+                    Simulated: MPDU Density 8
+            RSN Capabilities (w/ WPA2):
+                RTN56U:
+                    RSN PTKSA Replay Counter Capab: 1
+                Simulated:
+                    RSN PTKSA Replay Counter Capab: 16
+    """
+    # Verify interface and security
+    hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST)
+    hostapd_utils.verify_interface(iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST)
+    hostapd_utils.verify_security_mode(security, [SecurityMode.OPEN, SecurityMode.WPA2])
+    if security.security_mode is not SecurityMode.OPEN:
+        hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER])
+
+    # Common Parameters
+    rates = hostapd_constants.CCK_AND_OFDM_DATA_RATES
+    qbss = {"bss_load_update_period": 50, "chan_util_avg_period": 600}
+    n_capabilities = [
+        hostapd_constants.N_CAPABILITY_SGI20,
+        hostapd_constants.N_CAPABILITY_SGI40,
+        hostapd_constants.N_CAPABILITY_TX_STBC,
+        hostapd_constants.N_CAPABILITY_RX_STBC1,
+    ]
+
+    # 2.4GHz
+    if channel <= 11:
+        interface = iface_wlan_2g
+        rates.update(hostapd_constants.CCK_AND_OFDM_BASIC_RATES)
+        # Ralink Technology IE
+        # US Country Code IE
+        # AP Channel Report IEs (2)
+        # WPS IE
+        vendor_elements = {
+            "vendor_elements": "dd07000c4307000000"
+            "0706555320010b14"
+            "33082001020304050607"
+            "33082105060708090a0b"
+            "dd270050f204104a000110104400010210470010bc329e001dd811b286011c872c"
+            "d33448103c000101"
+        }
+
+    # 5GHz
+    else:
+        interface = iface_wlan_5g
+        rates.update(hostapd_constants.OFDM_ONLY_BASIC_RATES)
+        # Ralink Technology IE
+        # US Country Code IE
+        vendor_elements = {"vendor_elements": "dd07000c4307000000" "0706555320010b14"}
+
+    additional_params = rates | vendor_elements | qbss | hostapd_constants.UAPSD_ENABLED
+
+    config = hostapd_config.HostapdConfig(
+        ssid=ssid,
+        channel=channel,
+        hidden=False,
+        security=security,
+        interface=interface,
+        mode=hostapd_constants.MODE_11N_MIXED,
+        force_wmm=True,
+        beacon_interval=100,
+        dtim_period=1,
+        short_preamble=False,
+        n_capabilities=n_capabilities,
+        additional_parameters=additional_params,
+    )
+
+    return config
+
+
+def asus_rtn66u(
+    iface_wlan_2g: str,
+    iface_wlan_5g: str,
+    channel: int,
+    security: Security,
+    ssid: str | None = None,
+) -> hostapd_config.HostapdConfig:
+    # TODO(b/143104825): Permit RIFS once it is supported
+    """A simulated implementation of an Asus RTN66U AP.
+    Args:
+        iface_wlan_2g: The 2.4Ghz interface of the test AP.
+        iface_wlan_5g: The 5Ghz interface of the test AP.
+        channel: What channel to use.
+        security: A security profile.  Must be open or WPA2 as this is what is
+            supported by the RTN66U.
+        ssid: Network name
+    Returns:
+        A hostapd config
+    Differences from real RTN66U:
+        2.4GHz:
+            Rates:
+                RTN66U:
+                    Supported: 1, 2, 5.5, 11, 18, 24, 36, 54
+                    Extended: 6, 9, 12, 48
+                Simulated:
+                    Supported: 1, 2, 5.5, 11, 6, 9, 12, 18
+                    Extended: 24, 36, 48, 54
+        Both:
+            HT Info:
+                RTN66U: RIFS Permitted
+                Simulated: RIFS Prohibited
+            HT Capab:
+                Info:
+                    RTN66U: Green Field supported
+                    Simulated: Green Field not supported on Whirlwind.
+                A-MPDU
+                    RTN66U: MPDU Density 4
+                    Simulated: MPDU Density 8
+    """
+    # Verify interface and security
+    hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST)
+    hostapd_utils.verify_interface(iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST)
+    hostapd_utils.verify_security_mode(security, [SecurityMode.OPEN, SecurityMode.WPA2])
+    if security.security_mode is not SecurityMode.OPEN:
+        hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER])
+
+    # Common Parameters
+    rates = hostapd_constants.CCK_AND_OFDM_DATA_RATES
+    n_capabilities = [
+        hostapd_constants.N_CAPABILITY_LDPC,
+        hostapd_constants.N_CAPABILITY_SGI20,
+        hostapd_constants.N_CAPABILITY_TX_STBC,
+        hostapd_constants.N_CAPABILITY_RX_STBC1,
+        hostapd_constants.N_CAPABILITY_MAX_AMSDU_7935,
+    ]
+    # Broadcom IE
+    vendor_elements = {"vendor_elements": "dd090010180200001c0000"}
+
+    # 2.4GHz
+    if channel <= 11:
+        interface = iface_wlan_2g
+        rates.update(hostapd_constants.CCK_AND_OFDM_BASIC_RATES)
+        n_capabilities.append(hostapd_constants.N_CAPABILITY_DSSS_CCK_40)
+
+    # 5GHz
+    else:
+        interface = iface_wlan_5g
+        rates.update(hostapd_constants.OFDM_ONLY_BASIC_RATES)
+
+    additional_params = rates | vendor_elements | hostapd_constants.UAPSD_ENABLED
+
+    config = hostapd_config.HostapdConfig(
+        ssid=ssid,
+        channel=channel,
+        hidden=False,
+        security=security,
+        interface=interface,
+        mode=hostapd_constants.MODE_11N_MIXED,
+        force_wmm=True,
+        beacon_interval=100,
+        dtim_period=3,
+        short_preamble=False,
+        n_capabilities=n_capabilities,
+        additional_parameters=additional_params,
+    )
+
+    return config
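+
+# Illustrative usage sketch (not part of the simulated profile). The interface
+# names and the Security keyword arguments below are assumptions made for the
+# example; check hostapd_security.Security for the exact constructor.
+#
+#     config = asus_rtn66u(
+#         iface_wlan_2g="wlan0",
+#         iface_wlan_5g="wlan1",
+#         channel=6,
+#         security=Security(security_mode=SecurityMode.WPA2, password="password1234"),
+#         ssid="rtn66u_sim",
+#     )
+#
+# The returned HostapdConfig can then be handed to the access point controller
+# to bring up an AP whose beacon contents resemble the RTN66U's.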
diff --git a/packages/antlion/controllers/ap_lib/third_party_ap_profiles/belkin.py b/packages/antlion/controllers/ap_lib/third_party_ap_profiles/belkin.py
new file mode 100644
index 0000000..62a9d66
--- /dev/null
+++ b/packages/antlion/controllers/ap_lib/third_party_ap_profiles/belkin.py
@@ -0,0 +1,98 @@
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+from antlion.controllers.ap_lib import hostapd_config, hostapd_constants, hostapd_utils
+from antlion.controllers.ap_lib.hostapd_security import Security, SecurityMode
+
+
+def belkin_f9k1001v5(
+    iface_wlan_2g: str, channel: int, security: Security, ssid: str | None = None
+) -> hostapd_config.HostapdConfig:
+    # TODO(b/143104825): Permit RIFS once it is supported
+    """A simulated implementation of what a Belkin F9K1001v5 AP
+    Args:
+        iface_wlan_2g: The 2.4Ghz interface of the test AP.
+        channel: What channel to use.
+        security: A security profile (open or WPA2).
+        ssid: The network name.
+    Returns:
+        A hostapd config.
+    Differences from real F9K1001v5:
+        Rates:
+            F9K1001v5:
+                Supported: 1, 2, 5.5, 11, 18, 24, 36, 54
+                Extended: 6, 9, 12, 48
+            Simulated:
+                Supported: 1, 2, 5.5, 11, 6, 9, 12, 18
+                Extended: 24, 36, 48, 54
+        HT Info:
+            F9K1001v5:
+                RIFS: Permitted
+            Simulated:
+                RIFS: Prohibited
+        RSN Capabilities (w/ WPA2):
+            F9K1001v5:
+                RSN PTKSA Replay Counter Capab: 1
+            Simulated:
+                RSN PTKSA Replay Counter Capab: 16
+    """
+    if channel > 11:
+        raise ValueError(
+            f"The Belkin F9k1001v5 does not support 5Ghz. Invalid channel ({channel})"
+        )
+    # Verify interface and security
+    hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST)
+    hostapd_utils.verify_security_mode(security, [SecurityMode.OPEN, SecurityMode.WPA2])
+    if security.security_mode is not SecurityMode.OPEN:
+        hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER])
+
+    n_capabilities = [
+        hostapd_constants.N_CAPABILITY_SGI20,
+        hostapd_constants.N_CAPABILITY_SGI40,
+        hostapd_constants.N_CAPABILITY_TX_STBC,
+        hostapd_constants.N_CAPABILITY_MAX_AMSDU_7935,
+        hostapd_constants.N_CAPABILITY_DSSS_CCK_40,
+    ]
+
+    rates = (
+        hostapd_constants.CCK_AND_OFDM_BASIC_RATES
+        | hostapd_constants.CCK_AND_OFDM_DATA_RATES
+    )
+
+    # Broadcom IE
+    # WPS IE
+    vendor_elements = {
+        "vendor_elements": "dd090010180200100c0000"
+        "dd180050f204104a00011010440001021049000600372a000120"
+    }
+
+    additional_params = rates | vendor_elements
+
+    config = hostapd_config.HostapdConfig(
+        ssid=ssid,
+        channel=channel,
+        hidden=False,
+        security=security,
+        interface=iface_wlan_2g,
+        mode=hostapd_constants.MODE_11N_MIXED,
+        force_wmm=True,
+        beacon_interval=100,
+        dtim_period=3,
+        short_preamble=False,
+        n_capabilities=n_capabilities,
+        additional_parameters=additional_params,
+    )
+
+    return config
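+
+# Illustrative usage sketch (not part of the simulated profile). The interface
+# name and the Security constructor arguments are assumptions made for the
+# example. Only 2.4GHz channels (1-11) are accepted; higher channels raise
+# ValueError.
+#
+#     config = belkin_f9k1001v5(
+#         iface_wlan_2g="wlan0",
+#         channel=1,
+#         security=Security(security_mode=SecurityMode.OPEN),
+#         ssid="f9k1001v5_sim",
+#     )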
diff --git a/packages/antlion/controllers/ap_lib/third_party_ap_profiles/linksys.py b/packages/antlion/controllers/ap_lib/third_party_ap_profiles/linksys.py
new file mode 100644
index 0000000..21f3fb1
--- /dev/null
+++ b/packages/antlion/controllers/ap_lib/third_party_ap_profiles/linksys.py
@@ -0,0 +1,305 @@
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+from antlion.controllers.ap_lib import hostapd_config, hostapd_constants, hostapd_utils
+from antlion.controllers.ap_lib.hostapd_security import Security, SecurityMode
+
+
+def linksys_ea4500(
+    iface_wlan_2g: str,
+    iface_wlan_5g: str,
+    channel: int,
+    security: Security,
+    ssid: str | None = None,
+) -> hostapd_config.HostapdConfig:
+    # TODO(b/143104825): Permit RIFS once it is supported
+    # TODO(b/144446076): Address non-whirlwind hardware capabilities.
+    """A simulated implementation of what a Linksys EA4500 AP
+    Args:
+        iface_wlan_2g: The 2.4Ghz interface of the test AP.
+        iface_wlan_5g: The 5GHz interface of the test AP.
+        channel: What channel to use.
+        security: A security profile (open or WPA2).
+        ssid: The network name.
+    Returns:
+        A hostapd config.
+    Differences from real EA4500:
+        CF (Contention-Free) Parameter IE:
+            EA4500: has CF Parameter IE
+            Simulated: does not have CF Parameter IE
+        HT Capab:
+            Info:
+                EA4500: Green Field supported
+                Simulated: Green Field not supported on Whirlwind.
+            A-MPDU
+                EA4500: MPDU Density 4
+                Simulated: MPDU Density 8
+        RSN Capab (w/ WPA2):
+            EA4500:
+                RSN PTKSA Replay Counter Capab: 1
+            Simulated:
+                RSN PTKSA Replay Counter Capab: 16
+    """
+    # Verify interface and security
+    hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST)
+    hostapd_utils.verify_interface(iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST)
+    hostapd_utils.verify_security_mode(security, [SecurityMode.OPEN, SecurityMode.WPA2])
+    if security.security_mode is not SecurityMode.OPEN:
+        hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER])
+
+    # Common Parameters
+    rates = hostapd_constants.CCK_AND_OFDM_DATA_RATES
+
+    n_capabilities = [
+        hostapd_constants.N_CAPABILITY_SGI20,
+        hostapd_constants.N_CAPABILITY_SGI40,
+        hostapd_constants.N_CAPABILITY_TX_STBC,
+        hostapd_constants.N_CAPABILITY_RX_STBC1,
+        hostapd_constants.N_CAPABILITY_DSSS_CCK_40,
+    ]
+
+    # Epigram HT Capabilities IE
+    # Epigram HT Additional Capabilities IE
+    # Marvell Semiconductor, Inc. IE
+    vendor_elements = {
+        "vendor_elements": "dd1e00904c33fc0117ffffff0000000000000000000000000000000000000000"
+        "dd1a00904c3424000000000000000000000000000000000000000000"
+        "dd06005043030000"
+    }
+
+    # 2.4GHz
+    if channel <= 11:
+        interface = iface_wlan_2g
+        rates.update(hostapd_constants.CCK_AND_OFDM_BASIC_RATES)
+        obss_interval = 180
+        n_capabilities.append(hostapd_constants.N_CAPABILITY_HT40_PLUS)
+
+    # 5GHz
+    else:
+        interface = iface_wlan_5g
+        rates.update(hostapd_constants.OFDM_ONLY_BASIC_RATES)
+        obss_interval = None
+
+    additional_params = rates | vendor_elements | hostapd_constants.UAPSD_ENABLED
+
+    config = hostapd_config.HostapdConfig(
+        ssid=ssid,
+        channel=channel,
+        hidden=False,
+        security=security,
+        interface=interface,
+        mode=hostapd_constants.MODE_11N_MIXED,
+        force_wmm=True,
+        beacon_interval=100,
+        dtim_period=1,
+        short_preamble=True,
+        obss_interval=obss_interval,
+        n_capabilities=n_capabilities,
+        additional_parameters=additional_params,
+    )
+
+    return config
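+
+# Illustrative usage sketch (not part of the simulated profile). Interface
+# names and Security arguments are assumptions made for the example; the
+# channel selects which interface the profile is applied to (<= 11 uses
+# iface_wlan_2g, otherwise iface_wlan_5g).
+#
+#     config = linksys_ea4500(
+#         iface_wlan_2g="wlan0",
+#         iface_wlan_5g="wlan1",
+#         channel=36,  # 5GHz channel, so iface_wlan_5g is used
+#         security=Security(security_mode=SecurityMode.WPA2, password="password1234"),
+#         ssid="ea4500_sim",
+#     )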
+
+
+def linksys_ea9500(
+    iface_wlan_2g: str,
+    iface_wlan_5g: str,
+    channel: int,
+    security: Security,
+    ssid: str | None = None,
+) -> hostapd_config.HostapdConfig:
+    """A simulated implementation of what a Linksys EA9500 AP
+    Args:
+        iface_wlan_2g: The 2.4Ghz interface of the test AP.
+        iface_wlan_5g: The 5GHz interface of the test AP.
+        channel: What channel to use.
+        security: A security profile (open or WPA2).
+        ssid: The network name.
+    Returns:
+        A hostapd config.
+    Differences from real EA9500: